Source-Changes-HG archive


[src/trunk]: src Actually use the assembly version of the atomic function if ...



details:   https://anonhg.NetBSD.org/src/rev/2d310da896be
branches:  trunk
changeset: 781055:2d310da896be
user:      matt <matt%NetBSD.org@localhost>
date:      Thu Aug 16 16:49:10 2012 +0000

description:
Actually use the assembly versions of the atomic functions when compiling
for ARMv6 or ARMv7 CPUs.  Use atomic_cas_ptr instead of _lock_cas so that
the assembly version is picked up where it is used.
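
For context, a minimal sketch of how a caller uses atomic_cas_ptr() (from
the atomic_cas(3) family) in a retry loop; the lock-free stack and its
"node" type are illustrative only, not part of this change:

        #include <sys/atomic.h>

        struct node {                   /* hypothetical example type */
                struct node *next;
        };

        /* Push n onto a lock-free stack rooted at *headp. */
        void
        push(struct node *volatile *headp, struct node *n)
        {
                struct node *old;

                do {
                        old = *headp;
                        n->next = old;
                        /*
                         * atomic_cas_ptr() returns the value actually
                         * found at *headp; retry until it matches the
                         * snapshot we linked against.
                         */
                } while (atomic_cas_ptr(headp, old, n) != old);
        }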

diffstat:

 common/lib/libc/arch/arm/atomic/Makefile.inc   |  35 +++++++++++++---
 common/lib/libc/arch/arm/atomic/atomic_cas_8.S |  53 ++++++++++---------------
 common/lib/libc/arch/arm/atomic/atomic_swap.S  |  52 +++++++++++++++++++------
 common/lib/libc/arch/arm/atomic/membar_ops.S   |  10 ++++-
 sys/arch/arm/arm/lock_cas.S                    |  14 +-----
 sys/arch/arm/include/mutex.h                   |  11 ++--
 6 files changed, 107 insertions(+), 68 deletions(-)

diffs (truncated from 312 to 300 lines):

diff -r e9b8f5d47b3f -r 2d310da896be common/lib/libc/arch/arm/atomic/Makefile.inc
--- a/common/lib/libc/arch/arm/atomic/Makefile.inc      Thu Aug 16 16:41:53 2012 +0000
+++ b/common/lib/libc/arch/arm/atomic/Makefile.inc      Thu Aug 16 16:49:10 2012 +0000
@@ -1,19 +1,40 @@
-#      $NetBSD: Makefile.inc,v 1.8 2009/01/04 17:54:29 pooka Exp $
+#      $NetBSD: Makefile.inc,v 1.9 2012/08/16 16:49:10 matt Exp $
+
+ARMV6= ${CPUFLAGS:M-march=armv7*} ${CPUFLAGS:M-mcpu=cortex*}
+ARMV6+= ${CPUFLAGS:M-march=armv6*} ${CPUFLAGS:M-mcpu=arm11*}
+ARMV6+= ${CFLAGS:M-march=armv7*} ${CFLAGS:M-mcpu=cortex*}
+ARMV6+= ${CFLAGS:M-march=armv6*} ${CFLAGS:M-mcpu=arm11*}
+ARMV6+= ${CPPFLAGS:M-march=armv7*} ${CPPFLAGS:M-mcpu=cortex*}
+ARMV6+= ${CPPFLAGS:M-march=armv6*} ${CPPFLAGS:M-mcpu=arm11*}
 
 .if defined(LIB) && (${LIB} == "kern" || ${LIB} == "c" || ${LIB} == "pthread" \
        || ${LIB} == "rump")
 
-SRCS+= atomic_add_32_cas.c atomic_add_32_nv_cas.c atomic_and_32_cas.c \
-       atomic_and_32_nv_cas.c atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \
-       atomic_inc_32_cas.c atomic_inc_32_nv_cas.c atomic_or_32_cas.c \
-       atomic_or_32_nv_cas.c atomic_swap_32_cas.c membar_ops_nop.c
+.if empty(ARMV6)
+SRCS.atomic+=  atomic_add_32_cas.c atomic_add_32_nv_cas.c \
+               atomic_and_32_cas.c atomic_and_32_nv_cas.c \
+               atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \
+               atomic_inc_32_cas.c atomic_inc_32_nv_cas.c \
+               atomic_or_32_cas.c atomic_or_32_nv_cas.c \
+               atomic_swap_32_cas.c membar_ops_nop.c
+.else
+SRCS.atomic+=  atomic_add_32.S atomic_and_32.S atomic_cas_32.S \
+               atomic_dec_32.S atomic_inc_32.S atomic_or_32.S \
+               atomic_swap.S membar_ops.S
+.endif
 
 .endif
 
 .if defined(LIB) && (${LIB} == "c" || ${LIB} == "pthread")
 
-SRCS+= atomic_init_testset.c
-SRCS+= atomic_cas_up.S
+.if empty(ARMV6)
+SRCS.atomic+=  atomic_init_testset.c
+SRCS.atomic+=  atomic_cas_up.S
 CPPFLAGS+= -D__HAVE_ASM_ATOMIC_CAS_UP
+.else
+SRCS.atomic+=  atomic_init_cas.c
+.endif
 
 .endif
+
+SRCS+= ${SRCS.atomic}
diff -r e9b8f5d47b3f -r 2d310da896be common/lib/libc/arch/arm/atomic/atomic_cas_8.S
--- a/common/lib/libc/arch/arm/atomic/atomic_cas_8.S    Thu Aug 16 16:41:53 2012 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_cas_8.S    Thu Aug 16 16:49:10 2012 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $ */
+/* $NetBSD: atomic_cas_8.S,v 1.2 2012/08/16 16:49:10 matt Exp $ */
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -27,37 +27,28 @@
  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  * POSSIBILITY OF SUCH DAMAGE.
  */
-#include <machine/asm.h>
 
-RCSID("$NetBSD: atomic_cas_8.S,v 1.1 2008/11/18 15:22:56 matt Exp $")
+#include "atomic_op_asm.h"
 
-ENTRY(atomic_cas_8)
-       XPUSH   {r4,r5}                 /* we need some more registers */
-       and     r3, r0, #3              /* which byte do we replace? */
-#if __ARMEB__
-       eor     r3, r3, #3              /* bytes are reversed on BE */
-#endif
-       mov     r3, r3, lsl #3          /* multiply by 8 */
-       mov     r1, r1, lsl r3          /* mov old value to correct byte */
-       eor     r2, r1, r2, lsl r3      /* move new value to correct byte */
-/*     eor     r2, r2, r1 */           /* new value is now (old ^ new) */
-       mov     r5, #0xff               /* load mask */
-       mov     r5, r5, lsl r3          /* and move to correct byte */
-       mov     r3, r0                  /* move pointer */
+#if defined(_ARM_ARCH_6)
+/*
+ * ARMv6 has load-exclusive/store-exclusive which works for both user
+ * and kernel.
+ */
+ENTRY_NP(_atomic_cas_8)
+       mov     r3, r0                  /* we need r0 for return value */
+1:
+       ldrexb  r0, [r3]                /* load old value */
+       teq     r0, r1                  /*   compare? */
+       RETc(ne)                        /*     return if different */
+       strexb  ip, r2, [r3]            /* store new value */
+       cmp     ip, #0                  /*   succeed? */
+       bne     1b                      /*     nope, try again. */
+       RET                             /*    yes, return. */
+       END(_atomic_cas_8)
 
-1:     ldrex   r4, [r3]                /* load 32bit value */
-       and     r0, r4, r5              /* clear other bytes */
-       teq     r0, r1                  /* equal old value? */
-       bne     2f                      /*   nope, bail. */
-       eor     r4, r4, r2              /* new == old ^ (old ^ new) */
-       strex   ip, r4, [r3]            /* attempt to store it */
-       cmp     ip, #0                  /*   succeed? */
-       bne     1b                      /* nope, try again. */
+ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8)
+STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8)
+STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8)
 
-2:     XPOP    {r4,r5}                 /* don't need these anymore */
-       and     r1, r3, #3
-#if __ARMEB__
-       eor     r1, r1, #3
-#endif
-       mov     r0, r0, lsr r1          /* shift it back to lsb byte */
-       RET
+#endif /* _ARM_ARCH_6 */
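
For readers less fluent in ARM assembly, the LDREXB/STREXB loop above
implements ordinary compare-and-swap semantics on a byte.  A hedged C
rendering using the GCC __atomic builtin; an equivalent formulation for
illustration, not code from this change:

        #include <stdbool.h>
        #include <stdint.h>

        /*
         * Return the value observed at *p; *p is set to newval only if
         * the observed value equals expected (mirroring atomic_cas_8()).
         */
        static inline uint8_t
        cas_8_sketch(volatile uint8_t *p, uint8_t expected, uint8_t newval)
        {
                uint8_t observed = expected;

                (void)__atomic_compare_exchange_n(p, &observed, newval,
                    false, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
                return observed;
        }
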
diff -r e9b8f5d47b3f -r 2d310da896be common/lib/libc/arch/arm/atomic/atomic_swap.S
--- a/common/lib/libc/arch/arm/atomic/atomic_swap.S     Thu Aug 16 16:41:53 2012 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_swap.S     Thu Aug 16 16:49:10 2012 +0000
@@ -1,7 +1,7 @@
-/*     $NetBSD: atomic_swap.S,v 1.2 2008/08/16 07:12:40 matt Exp $     */
+/*     $NetBSD: atomic_swap.S,v 1.3 2012/08/16 16:49:10 matt Exp $     */
 
 /*-
- * Copyright (c) 2007 The NetBSD Foundation, Inc.
+ * Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
@@ -15,13 +15,6 @@
  * 2. Redistributions in binary form must reproduce the above copyright
  *    notice, this list of conditions and the following disclaimer in the
  *    documentation and/or other materials provided with the distribution.
- * 3. All advertising materials mentioning features or use of this software
- *    must display the following acknowledgement:
- *     This product includes software developed by the NetBSD
- *     Foundation, Inc. and its contributors.
- * 4. Neither the name of The NetBSD Foundation nor the names of its
- *    contributors may be used to endorse or promote products derived
- *    from this software without specific prior written permission.
  *      
  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
@@ -38,8 +31,31 @@
 
 #include "atomic_op_asm.h"
 
+/*
+ * While SWP{B} is sufficient on its own for pre-ARMv7 CPUs, on MP ARMv7
+ * cores SWP{B} is disabled since it is no longer atomic among multiple
+ * CPUs; executing it will actually raise an UNDEFINED exception.
+ *
+ * So we use the LDREX/STREX template, but substitute a SWP instruction
+ * followed by a MOV instruction (using a temporary register).  That gives
+ * a handler for the SWP UNDEFINED exception enough information to "patch"
+ * this instance of SWP with the correct LDREX/STREX forms.  (Note that
+ * this works even on "read-only" pages: if the page gets tossed, we will
+ * simply take another exception and patch it again.)
+ */
+
 ENTRY_NP(_atomic_swap_32)
-       swp     r0, r1, [r0]
+       mov     r2, r0
+1:
+#ifdef _ARM_ARCH_6
+       ldrex   r0, [r2]
+       strex   r3, r1, [r2]
+#else
+       swp     r0, r1, [r2]
+       mov     r3, #0
+#endif
+       cmp     r3, #0
+       bne     1b
        RET
        END(_atomic_swap_32)
 ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32)
@@ -51,11 +67,21 @@
 STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)
 
 ENTRY_NP(_atomic_swap_8)
-       swpb    r0, r1, [r0]
+       mov     r2, r0
+1:
+#ifdef _ARM_ARCH_6
+       ldrexb  r0, [r2]
+       strexb  r3, r1, [r2]
+#else
+       swpb    r0, r1, [r2]
+       mov     r3, #0
+#endif
+       cmp     r3, #0
+       bne     1b
        RET
        END(_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8)
 ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8)
-STRONG_ALIAS(_atomic_swap_char,_atomic_swap_32)
-STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_32)
+STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8)
+STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8)
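
In C terms the loop above is a plain atomic exchange; a rough sketch with
the GCC builtin (illustration only: it cannot show the SWP-to-LDREX/STREX
patching trick the comment describes):

        #include <stdint.h>

        /* Store newval into *p and return the previous contents. */
        static inline uint32_t
        swap_32_sketch(volatile uint32_t *p, uint32_t newval)
        {
                return __atomic_exchange_n(p, newval, __ATOMIC_RELAXED);
        }
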
diff -r e9b8f5d47b3f -r 2d310da896be common/lib/libc/arch/arm/atomic/membar_ops.S
--- a/common/lib/libc/arch/arm/atomic/membar_ops.S      Thu Aug 16 16:41:53 2012 +0000
+++ b/common/lib/libc/arch/arm/atomic/membar_ops.S      Thu Aug 16 16:49:10 2012 +0000
@@ -1,4 +1,4 @@
-/*     $NetBSD: membar_ops.S,v 1.2 2008/08/16 07:12:40 matt Exp $      */
+/*     $NetBSD: membar_ops.S,v 1.3 2012/08/16 16:49:10 matt Exp $      */
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -33,7 +33,11 @@
 #ifdef _ARM_ARCH_6
 
 ENTRY_NP(_membar_producer)
+#ifdef _ARM_ARCH_7
+       dsb
+#else
        mcr     p15, 0, r0, c7, c10, 4   /* Data Synchronization Barrier */
+#endif
        RET
        END(_membar_producer)
 ATOMIC_OP_ALIAS(membar_producer,_membar_producer)
@@ -41,7 +45,11 @@
 STRONG_ALIAS(_membar_write,_membar_producer)
 
 ENTRY_NP(_membar_sync)
+#ifdef _ARM_ARCH_7
+       dmb
+#else
        mcr     p15, 0, r0, c7, c10, 5  /* Data Memory Barrier */
+#endif
        RET
        END(_membar_sync)
 ATOMIC_OP_ALIAS(membar_sync,_membar_sync)
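
As context for these barriers, membar_producer() is what a writer places
between filling in a payload and publishing it, so that a consumer which
observes the flag also observes the data.  A minimal hedged sketch; the
msg structure and publish() are illustrative, not part of this change:

        #include <sys/atomic.h>

        struct msg {                    /* hypothetical example type */
                int             payload;
                volatile int    ready;
        };

        void
        publish(struct msg *m, int value)
        {
                m->payload = value;
                membar_producer();      /* order payload store before flag */
                m->ready = 1;
        }

A matching membar_consumer() on the reader side pairs with this.
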
diff -r e9b8f5d47b3f -r 2d310da896be sys/arch/arm/arm/lock_cas.S
--- a/sys/arch/arm/arm/lock_cas.S       Thu Aug 16 16:41:53 2012 +0000
+++ b/sys/arch/arm/arm/lock_cas.S       Thu Aug 16 16:49:10 2012 +0000
@@ -1,4 +1,4 @@
-/*     $NetBSD: lock_cas.S,v 1.7 2010/07/07 01:17:26 chs Exp $ */
+/*     $NetBSD: lock_cas.S,v 1.8 2012/08/16 16:49:10 matt Exp $        */
 
 /*-
  * Copyright (c) 2007 The NetBSD Foundation, Inc.
@@ -50,6 +50,7 @@
        .word   _C_LABEL(_lock_cas_fail)
 #endif /* ARM_LOCK_CAS_DEBUG */
 
+#ifndef _ARM_ARCH_6
 /*
  * _lock_cas:
  *
@@ -66,15 +67,6 @@
  */
        .globl  _C_LABEL(_lock_cas_end)
 ENTRY_NP(_lock_cas)
-#ifdef _ARCH_ARM_6
-        mov    ip, r0
-1:      ldrex  r0, [ip]                /* eventual return value */
-        cmp    r1, r0
-       RETc(ne)
-        strex  r3, r2, [ip]
-        cmp    r3, #0
-        bne    1b
-#else
        ldr     r3, [r0]
        teq     r3, r1
        streq   r2, [r0]
@@ -93,7 +85,6 @@
 #endif /* __ARMEB__ */
        stmia   r3, {r1-r2}             /* store ev_count */
 #endif /* ARM_LOCK_CAS_DEBUG */
-#endif
        RET
 END(_lock_cas)
 
@@ -114,6 +105,7 @@
 STRONG_ALIAS(atomic_cas_uint_ni,_lock_cas)
 STRONG_ALIAS(_atomic_cas_ptr_ni,_lock_cas)
 STRONG_ALIAS(atomic_cas_ptr_ni,_lock_cas)
+#endif /* !_ARM_ARCH_6 */
 
 #ifdef __PROG32
 #define SAVE_REGS      stmfd   sp!, {r4-r6}
diff -r e9b8f5d47b3f -r 2d310da896be sys/arch/arm/include/mutex.h
--- a/sys/arch/arm/include/mutex.h      Thu Aug 16 16:41:53 2012 +0000
+++ b/sys/arch/arm/include/mutex.h      Thu Aug 16 16:49:10 2012 +0000
@@ -1,4 +1,4 @@
-/*     $NetBSD: mutex.h,v 1.10 2008/04/28 20:23:14 martin Exp $        */
+/*     $NetBSD: mutex.h,v 1.11 2012/08/16 16:49:10 matt Exp $  */
 
 /*-
  * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
@@ -90,11 +90,12 @@
  */
 #define        MUTEX_GIVE(mtx)                 /* nothing */
 
-unsigned long  _lock_cas(volatile unsigned long *,


