Source-Changes-HG archive
[src/trunk]: src/common/lib/libc/arch/arm/atomic Add support for the gcc __sy...
details: https://anonhg.NetBSD.org/src/rev/70e317555073
branches: trunk
changeset: 791214:70e317555073
user: matt <matt%NetBSD.org@localhost>
date: Fri Nov 08 22:42:52 2013 +0000
description:
Add support for the gcc __sync builtins.
Note that these need earmv6 or later to get the ldrex/strex instructions.
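For reference, the __sync_* names are the legacy GCC atomic builtins; when the
compiler cannot (or is asked not to) inline them, it emits calls to out-of-line
helpers such as __sync_fetch_and_add_2 and __sync_bool_compare_and_swap_4,
which are the entry points this change provides. A minimal C sketch of the kind
of caller these routines back (the variables and function names below are
illustrative, not part of this change):

#include <stdint.h>
#include <stdbool.h>

static volatile uint16_t counter16;	/* hypothetical shared counter */
static volatile uint32_t flagword;	/* hypothetical shared flag word */

uint16_t
bump_counter(void)
{
	/* lowers to __sync_fetch_and_add_2; returns the pre-add value */
	return __sync_fetch_and_add(&counter16, 1);
}

bool
claim_flag(uint32_t expected, uint32_t desired)
{
	/* lowers to __sync_bool_compare_and_swap_4; true on success */
	return __sync_bool_compare_and_swap(&flagword, expected, desired);
}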
diffstat:
common/lib/libc/arch/arm/atomic/Makefile.inc | 26 +-
common/lib/libc/arch/arm/atomic/atomic_add_16.S | 100 ++++++++++
common/lib/libc/arch/arm/atomic/atomic_add_32.S | 38 +++-
common/lib/libc/arch/arm/atomic/atomic_add_64.S | 3 +-
common/lib/libc/arch/arm/atomic/atomic_add_8.S | 100 ++++++++++
common/lib/libc/arch/arm/atomic/atomic_and_16.S | 76 +++++++
common/lib/libc/arch/arm/atomic/atomic_and_32.S | 11 +-
common/lib/libc/arch/arm/atomic/atomic_and_64.S | 3 +-
common/lib/libc/arch/arm/atomic/atomic_and_8.S | 76 +++++++
common/lib/libc/arch/arm/atomic/atomic_cas_16.S | 64 ++++++
common/lib/libc/arch/arm/atomic/atomic_cas_32.S | 3 +-
common/lib/libc/arch/arm/atomic/atomic_cas_64.S | 3 +-
common/lib/libc/arch/arm/atomic/atomic_cas_8.S | 6 +-
common/lib/libc/arch/arm/atomic/atomic_inc_32.S | 6 +-
common/lib/libc/arch/arm/atomic/atomic_nand_16.S | 78 +++++++
common/lib/libc/arch/arm/atomic/atomic_nand_32.S | 82 ++++++++
common/lib/libc/arch/arm/atomic/atomic_nand_64.S | 64 ++++++
common/lib/libc/arch/arm/atomic/atomic_nand_8.S | 78 +++++++
common/lib/libc/arch/arm/atomic/atomic_op_asm.h | 6 +-
common/lib/libc/arch/arm/atomic/atomic_or_16.S | 75 +++++++
common/lib/libc/arch/arm/atomic/atomic_or_32.S | 11 +-
common/lib/libc/arch/arm/atomic/atomic_or_64.S | 3 +-
common/lib/libc/arch/arm/atomic/atomic_or_8.S | 76 +++++++
common/lib/libc/arch/arm/atomic/atomic_sub_64.S | 63 ++++++
common/lib/libc/arch/arm/atomic/atomic_swap.S | 16 +-
common/lib/libc/arch/arm/atomic/atomic_swap_16.S | 62 ++++++
common/lib/libc/arch/arm/atomic/atomic_swap_64.S | 12 +-
common/lib/libc/arch/arm/atomic/atomic_xor_16.S | 76 +++++++
common/lib/libc/arch/arm/atomic/atomic_xor_32.S | 79 +++++++
common/lib/libc/arch/arm/atomic/atomic_xor_64.S | 62 ++++++
common/lib/libc/arch/arm/atomic/atomic_xor_8.S | 76 +++++++
common/lib/libc/arch/arm/atomic/membar_ops.S | 7 +-
common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_1.S | 62 ++++++
common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_2.S | 61 ++++++
common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_4.S | 61 ++++++
common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_8.S | 73 +++++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_add_8.S | 58 +++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_and_8.S | 58 +++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_nand_8.S | 60 ++++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_or_8.S | 58 +++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_sub_8.S | 58 +++++
common/lib/libc/arch/arm/atomic/sync_fetch_and_xor_8.S | 58 +++++
42 files changed, 2009 insertions(+), 39 deletions(-)
diffs (truncated from 2459 to 300 lines):
diff -r a80a48130ab7 -r 70e317555073 common/lib/libc/arch/arm/atomic/Makefile.inc
--- a/common/lib/libc/arch/arm/atomic/Makefile.inc Fri Nov 08 19:22:19 2013 +0000
+++ b/common/lib/libc/arch/arm/atomic/Makefile.inc Fri Nov 08 22:42:52 2013 +0000
@@ -1,4 +1,4 @@
-# $NetBSD: Makefile.inc,v 1.13 2013/08/19 03:55:12 matt Exp $
+# $NetBSD: Makefile.inc,v 1.14 2013/11/08 22:42:52 matt Exp $
ARMV6= ${CPUFLAGS:M-march=armv6*} ${CPUFLAGS:M-mcpu=arm11*}
ARMV6+= ${CFLAGS:M-march=armv6*:} ${CFLAGS:M-mcpu=arm11*}
@@ -6,8 +6,7 @@
ARMV7= ${CPUFLAGS:M-march=armv7*} ${CPUFLAGS:M-mcpu=cortex*}
ARMV7+= ${CFLAGS:M-march=armv7*:} ${CFLAGS:M-mcpu=cortex*}
ARMV7+= ${CPPFLAGS:M-march=armv7*:} ${CPPFLAGS:M-mcpu=cortex*}
-.if empty(CPPFLAGS:M-D_STANDALONE) \
- && empty(CFLAGS:M-march=*) && empty(CFLAGS:M-mcpu=*) \
+.if empty(CFLAGS:M-march=*) && empty(CFLAGS:M-mcpu=*) \
&& empty(CPPFLAGS:M-march=*) && empty(CPPFLAGS:M-mcpu=*) \
&& empty(CPUFLAGS:M-march=*) && empty(CPUFLAGS:M-mcpu=*)
ARMV6+= ${MACHINE_ARCH:Mearmv6*}
@@ -25,12 +24,21 @@
atomic_or_32_cas.c atomic_or_32_nv_cas.c \
atomic_swap_32_cas.c membar_ops_nop.c
.else
-SRCS.atomic+= atomic_add_32.S atomic_and_32.S atomic_cas_32.S
-SRCS.atomic+= atomic_dec_32.S atomic_inc_32.S atomic_or_32.S
-SRCS.atomic+= atomic_swap.S membar_ops.S
-SRCS.atomic+= atomic_add_64.S atomic_and_64.S atomic_cas_64.S
-SRCS.atomic+= atomic_dec_64.S atomic_inc_64.S atomic_or_64.S
-SRCS.atomic+= atomic_swap_64.S
+.for op in add and cas nand or xor
+.for sz in 8 16 32 64
+SRCS.atomic+= atomic_${op}_${sz}.S
+.endfor
+.endfor
+SRCS.atomic+= atomic_dec_32.S atomic_dec_64.S
+SRCS.atomic+= atomic_inc_32.S atomic_inc_64.S
+SRCS.atomic+= atomic_swap.S atomic_swap_16.S atomic_swap_64.S
+SRCS.atomic+= membar_ops.S
+.for op in add and nand or sub xor
+SRCS.atomic+= sync_fetch_and_${op}_8.S
+.endfor
+.for sz in 1 2 4 8
+SRCS.atomic+= sync_bool_compare_and_swap_${sz}.S
+.endfor
.endif
.endif
diff -r a80a48130ab7 -r 70e317555073 common/lib/libc/arch/arm/atomic/atomic_add_16.S
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_add_16.S Fri Nov 08 22:42:52 2013 +0000
@@ -0,0 +1,100 @@
+/* $NetBSD: atomic_add_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
+
+/*-
+ * Copyright (c) 2013 The NetBSD Foundation, Inc.
+ * All rights reserved.
+ *
+ * This code is derived from software contributed to The NetBSD Foundation
+ * by Matt Thomas <matt%3am-software.com@localhost>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "atomic_op_asm.h"
+
+#ifdef _ARM_ARCH_6
+
+ENTRY_NP(_atomic_sub_16)
+ negs r1, r1
+ /* FALLTHROUGH */
+ENTRY_NP(_atomic_add_16)
+ mov ip, r0
+1: ldrexh r0, [ip] /* load old value */
+ adds r3, r0, r1 /* calculate new value */
+ strexh r2, r3, [ip] /* try to store */
+ cmp r2, #0 /* succeed? */
+ bne 1b /* no, try again */
+#ifdef _ARM_ARCH_7
+ dmb
+#else
+ mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */
+#endif
+ RET /* return old value */
+END(_atomic_add_16)
+END(_atomic_sub_16)
+
+ATOMIC_OP_ALIAS(atomic_add_16,_atomic_add_16)
+ATOMIC_OP_ALIAS(atomic_add_short,_atomic_add_16)
+ATOMIC_OP_ALIAS(atomic_add_ushort,_atomic_add_16)
+STRONG_ALIAS(__sync_fetch_and_add_2,_atomic_add_16)
+STRONG_ALIAS(_atomic_add_short,_atomic_add_16)
+STRONG_ALIAS(_atomic_add_ushort,_atomic_add_16)
+
+ATOMIC_OP_ALIAS(atomic_sub_16,_atomic_sub_16)
+ATOMIC_OP_ALIAS(atomic_sub_short,_atomic_sub_16)
+ATOMIC_OP_ALIAS(atomic_sub_ushort,_atomic_sub_16)
+STRONG_ALIAS(__sync_fetch_and_sub_2,_atomic_sub_16)
+STRONG_ALIAS(_atomic_sub_short,_atomic_sub_16)
+STRONG_ALIAS(_atomic_sub_ushort,_atomic_sub_16)
+
+ENTRY_NP(_atomic_sub_16_nv)
+ negs r1, r1
+ /* FALLTHROUGH */
+ENTRY_NP(_atomic_add_16_nv)
+ mov ip, r0 /* need r0 for return value */
+1: ldrexh r0, [ip] /* load old value */
+ adds r0, r0, r1 /* calculate new value (return value) */
+ strexh r2, r0, [ip] /* try to store */
+ cmp r2, #0 /* succeed? */
+ bne 1b /* no, try again? */
+#ifdef _ARM_ARCH_7
+ dmb
+#else
+ mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */
+#endif
+ RET /* return new value */
+END(_atomic_add_16_nv)
+END(_atomic_sub_16_nv)
+ATOMIC_OP_ALIAS(atomic_add_16_nv,_atomic_add_16_nv)
+ATOMIC_OP_ALIAS(atomic_add_short_nv,_atomic_add_16_nv)
+ATOMIC_OP_ALIAS(atomic_add_ushort_nv,_atomic_add_16_nv)
+STRONG_ALIAS(__sync_add_and_fetch_2,_atomic_add_16_nv)
+STRONG_ALIAS(_atomic_add_short_nv,_atomic_add_16_nv)
+STRONG_ALIAS(_atomic_add_ushort_nv,_atomic_add_16_nv)
+
+ATOMIC_OP_ALIAS(atomic_sub_16_nv,_atomic_sub_16_nv)
+ATOMIC_OP_ALIAS(atomic_sub_short_nv,_atomic_sub_16_nv)
+ATOMIC_OP_ALIAS(atomic_sub_ushort_nv,_atomic_sub_16_nv)
+STRONG_ALIAS(__sync_sub_and_fetch_2,_atomic_sub_16_nv)
+STRONG_ALIAS(_atomic_sub_short_nv,_atomic_sub_16_nv)
+STRONG_ALIAS(_atomic_sub_ushort_nv,_atomic_sub_16_nv)
+#endif /* _ARM_ARCH_6 */
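The new 8- and 16-bit files follow the same template: one entry point that
returns the old value (here _atomic_add_16, aliased to __sync_fetch_and_add_2)
and one that returns the new value (_atomic_add_16_nv, aliased to
__sync_add_and_fetch_2). A short C illustration of that difference (the names
are made up for the example):

#include <stdint.h>

static volatile uint16_t v;

void
old_vs_new(void)
{
	uint16_t oldval, newval;

	v = 5;
	oldval = __sync_fetch_and_add(&v, 3);	/* oldval == 5, v is now 8   */
	newval = __sync_add_and_fetch(&v, 3);	/* newval == 11, v is now 11 */
}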
diff -r a80a48130ab7 -r 70e317555073 common/lib/libc/arch/arm/atomic/atomic_add_32.S
--- a/common/lib/libc/arch/arm/atomic/atomic_add_32.S Fri Nov 08 19:22:19 2013 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_add_32.S Fri Nov 08 22:42:52 2013 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_add_32.S,v 1.5 2013/08/11 04:41:17 matt Exp $ */
+/* $NetBSD: atomic_add_32.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2008 The NetBSD Foundation, Inc.
@@ -33,10 +33,14 @@
#ifdef _ARM_ARCH_6
+ENTRY_NP(_atomic_sub_32)
+ negs r1, r1
+ /* FALLTHROUGH */
ENTRY_NP(_atomic_add_32)
-1: ldrex r3, [r0] /* load old value */
- adds r3, r3, r1 /* calculate new value */
- strex r2, r3, [r0] /* try to store */
+ mov ip, r0
+1: ldrex r0, [ip] /* load old value */
+ adds r3, r0, r1 /* calculate new value */
+ strex r2, r3, [ip] /* try to store */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
@@ -46,15 +50,29 @@
#endif
RET /* return old value */
END(_atomic_add_32)
+END(_atomic_sub_32)
ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32)
+STRONG_ALIAS(__sync_fetch_and_add_4,_atomic_add_32)
STRONG_ALIAS(_atomic_add_int,_atomic_add_32)
STRONG_ALIAS(_atomic_add_long,_atomic_add_32)
STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32)
+ATOMIC_OP_ALIAS(atomic_sub_32,_atomic_sub_32)
+ATOMIC_OP_ALIAS(atomic_sub_int,_atomic_sub_32)
+ATOMIC_OP_ALIAS(atomic_sub_long,_atomic_sub_32)
+ATOMIC_OP_ALIAS(atomic_sub_ptr,_atomic_sub_32)
+STRONG_ALIAS(__sync_fetch_and_sub_4,_atomic_sub_32)
+STRONG_ALIAS(_atomic_sub_int,_atomic_sub_32)
+STRONG_ALIAS(_atomic_sub_long,_atomic_sub_32)
+STRONG_ALIAS(_atomic_sub_ptr,_atomic_sub_32)
+
+ENTRY_NP(_atomic_sub_32_nv)
+ negs r1, r1
+ /* FALLTHROUGH */
ENTRY_NP(_atomic_add_32_nv)
mov ip, r0 /* need r0 for return value */
1: ldrex r0, [ip] /* load old value */
@@ -68,13 +86,23 @@
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */
#endif
RET /* return new value */
- END(_atomic_add_32_nv)
+END(_atomic_add_32_nv)
+END(_atomic_sub_32_nv)
ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv)
+STRONG_ALIAS(__sync_add_and_fetch_4,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv)
+ATOMIC_OP_ALIAS(atomic_sub_32_nv,_atomic_sub_32_nv)
+ATOMIC_OP_ALIAS(atomic_sub_int_nv,_atomic_sub_32_nv)
+ATOMIC_OP_ALIAS(atomic_sub_long_nv,_atomic_sub_32_nv)
+ATOMIC_OP_ALIAS(atomic_sub_ptr_nv,_atomic_sub_32_nv)
+STRONG_ALIAS(__sync_sub_and_fetch_4,_atomic_sub_32_nv)
+STRONG_ALIAS(_atomic_sub_int_nv,_atomic_sub_32_nv)
+STRONG_ALIAS(_atomic_sub_long_nv,_atomic_sub_32_nv)
+STRONG_ALIAS(_atomic_sub_ptr_nv,_atomic_sub_32_nv)
#endif /* _ARM_ARCH_6 */
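Note the trick used throughout these hunks: the subtract entry points simply
negate the delta ("negs r1, r1") and fall through into the add loop, so a
single ldrex/strex retry sequence serves both operations. Expressed in C (a
sketch only; it relies on unsigned wraparound, and the function name is
hypothetical):

#include <stdint.h>

uint32_t
fetch_and_sub_32(volatile uint32_t *p, uint32_t delta)
{
	/* adding the two's complement of delta is the same as subtracting it */
	return __sync_fetch_and_add(p, (uint32_t)-delta);
}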
diff -r a80a48130ab7 -r 70e317555073 common/lib/libc/arch/arm/atomic/atomic_add_64.S
--- a/common/lib/libc/arch/arm/atomic/atomic_add_64.S Fri Nov 08 19:22:19 2013 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_add_64.S Fri Nov 08 22:42:52 2013 +0000
@@ -1,4 +1,4 @@
-/* $NetBSD: atomic_add_64.S,v 1.8 2013/08/20 07:52:31 matt Exp $ */
+/* $NetBSD: atomic_add_64.S,v 1.9 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2012 The NetBSD Foundation, Inc.
* All rights reserved.
@@ -57,5 +57,6 @@
STRONG_ALIAS(_atomic_add_64,_atomic_add_64_nv)
ATOMIC_OP_ALIAS(atomic_add_64_nv,_atomic_add_64_nv)
ATOMIC_OP_ALIAS(atomic_add_64,_atomic_add_64)
+STRONG_ALIAS(__sync_add_and_fetch_8,_atomic_add_64_nv)
#endif /* _ARM_ARCH_6 */
diff -r a80a48130ab7 -r 70e317555073 common/lib/libc/arch/arm/atomic/atomic_add_8.S
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/common/lib/libc/arch/arm/atomic/atomic_add_8.S Fri Nov 08 22:42:52 2013 +0000
@@ -0,0 +1,100 @@
+/* $NetBSD: atomic_add_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
+
+/*-
+ * Copyright (c) 2013 The NetBSD Foundation, Inc.
+ * All rights reserved.
+ *
+ * This code is derived from software contributed to The NetBSD Foundation
+ * by Matt Thomas <matt%3am-software.com@localhost>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "atomic_op_asm.h"
+
+#ifdef _ARM_ARCH_6
+
+ENTRY_NP(_atomic_sub_8)
+ negs r1, r1
+ /* FALLTHROUGH */
+ENTRY_NP(_atomic_add_8)
+ mov ip, r0
+1: ldrexb r0, [ip] /* load old value */
+ adds r3, r0, r1 /* calculate new value */
+ strexb r2, r3, [ip] /* try to store */
+ cmp r2, #0 /* succeed? */
+ bne 1b /* no, try again */
+#ifdef _ARM_ARCH_7
+ dmb