Source-Changes-HG archive

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index][Old Index]

[src/trunk]: src/sys/arch/alpha/include alpha: Add missing barriers in cpu_switchto



details:   https://anonhg.NetBSD.org/src/rev/13d34c05683a
branches:  trunk
changeset: 373666:13d34c05683a
user:      riastradh <riastradh%NetBSD.org@localhost>
date:      Thu Feb 23 14:55:10 2023 +0000

description:
alpha: Add missing barriers in cpu_switchto.

Details in comments.

PR kern/57240

XXX pullup-8
XXX pullup-9
XXX pullup-10

diffstat:

 sys/arch/alpha/include/asm.h |  22 +++++++++++++++++++++-
 1 files changed, 21 insertions(+), 1 deletions(-)

diffs (40 lines):

diff -r 3610dbaaa764 -r 13d34c05683a sys/arch/alpha/include/asm.h
--- a/sys/arch/alpha/include/asm.h      Thu Feb 23 14:54:57 2023 +0000
+++ b/sys/arch/alpha/include/asm.h      Thu Feb 23 14:55:10 2023 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: asm.h,v 1.44 2020/09/04 03:53:12 thorpej Exp $ */
+/* $NetBSD: asm.h,v 1.45 2023/02/23 14:55:10 riastradh Exp $ */
 
 /*
  * Copyright (c) 1991,1990,1989,1994,1995,1996 Carnegie Mellon University
@@ -669,10 +669,30 @@
 #define        GET_CURLWP                                                      \
        call_pal PAL_OSF1_rdval
 
+/*
+ * Issue barriers to coordinate mutex_exit on this CPU with
+ * mutex_vector_enter on another CPU.
+ *
+ * 1. Any prior mutex_exit by oldlwp must be visible to other
+ *    CPUs before we set ci_curlwp := newlwp on this one,
+ *    requiring a store-before-store barrier.
+ *
+ * 2. ci_curlwp := newlwp must be visible on all other CPUs
+ *    before any subsequent mutex_exit by newlwp can even test
+ *    whether there might be waiters, requiring a
+ *    store-before-load barrier.
+ *
+ * See kern_mutex.c for details -- this is necessary for
+ * adaptive mutexes to detect whether the lwp is on the CPU in
+ * order to safely block without requiring atomic r/m/w in
+ * mutex_exit.
+ */
 #define        SET_CURLWP(r)                                                   \
        ldq     v0, L_CPU(r)                                    ;       \
        mov     r, a0                                           ;       \
+       wmb     /* store-before-store XXX patch out if !MP? */  ;       \
        stq     r, CPU_INFO_CURLWP(v0)                          ;       \
+       mb      /* store-before-load XXX patch out if !MP? */   ;       \
        call_pal PAL_OSF1_wrval
 
 #else  /* if not MULTIPROCESSOR... */



Home | Main Index | Thread Index | Old Index