[src/trunk]: src/sys/arch Part III of ad's performance improvements for aarch64
details: https://anonhg.NetBSD.org/src/rev/117e4c271a7c
branches: trunk
changeset: 937229:117e4c271a7c
user: skrll <skrll%NetBSD.org@localhost>
date: Wed Aug 12 13:28:46 2020 +0000
description:
Part III of ad's performance improvements for aarch64
- Assembly language stubs for mutex_enter() and mutex_exit().
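[Editorial note: for readers unfamiliar with the stubs in the diff below, the fast path is logically a compare-and-set of the mutex owner word from zero to curlwp, falling back to the C slow path on contention. A rough C rendering using GCC/Clang __atomic builtins follows; the names sketch_curlwp() and mutex_enter_sketch() are illustrative, not NetBSD API.]

#include <stdint.h>

struct lwp;                                  /* opaque here */
extern struct lwp *sketch_curlwp(void);      /* hypothetical; stands in for "mrs xN, tpidr_el1" */
extern void mutex_vector_enter(uintptr_t *); /* MI slow path taken on contention */

static void
mutex_enter_sketch(uintptr_t *owner)
{
	uintptr_t zero = 0;

	/*
	 * Compare-and-set 0 -> curlwp.  On ARMv8.0 the compiler lowers
	 * this builtin to an ldxr/stxr loop much like the stub's; the
	 * acquire ordering stands in for the stub's "dmb sy" (which is
	 * in fact a stronger, full barrier).
	 */
	if (!__atomic_compare_exchange_n(owner, &zero,
	    (uintptr_t)sketch_curlwp(), 0,
	    __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
		mutex_vector_enter(owner);   /* contended: slow path */
}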
diffstat:
sys/arch/aarch64/aarch64/lock_stubs.S | 81 +++++++++++++++++++++++++++++++++++
sys/arch/aarch64/conf/files.aarch64 | 3 +-
sys/arch/aarch64/include/mutex.h | 4 +-
sys/arch/evbarm/include/mutex.h | 6 ++-
4 files changed, 91 insertions(+), 3 deletions(-)
diffs (124 lines):
diff -r 640b7a5ea62e -r 117e4c271a7c sys/arch/aarch64/aarch64/lock_stubs.S
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/sys/arch/aarch64/aarch64/lock_stubs.S Wed Aug 12 13:28:46 2020 +0000
@@ -0,0 +1,81 @@
+/* $NetBSD: lock_stubs.S,v 1.1 2020/08/12 13:28:46 skrll Exp $ */
+
+/*-
+ * Copyright (c) 2014, 2020 The NetBSD Foundation, Inc.
+ * All rights reserved.
+ *
+ * This code is derived from software contributed to The NetBSD Foundation
+ * by Matt Thomas of 3am Software Foundry, and by Andrew Doran.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
+ * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "opt_lockdebug.h"
+
+#include <aarch64/asm.h>
+
+#include "assym.h"
+
+RCSID("$NetBSD: lock_stubs.S,v 1.1 2020/08/12 13:28:46 skrll Exp $")
+
+#ifndef LOCKDEBUG
+/*
+ * mutex_enter(): the compare-and-set must be atomic with respect to
+ * interrupts and with respect to other CPUs.
+ */
+ENTRY(mutex_enter)
+ mrs x1, tpidr_el1 /* x1 = curlwp */
+1:
+ ldxr x2, [x0] /* load old value */
+ cbnz x2, 3f /* equals zero? */
+ stxr w3, x1, [x0] /* store curlwp as new value */
+ cbnz w3, 2f /* succeed? nope, try again. */
+ dmb sy /* membar_enter() */
+ ret
+2:
+ b 1b
+3:
+ b _C_LABEL(mutex_vector_enter)
+END(mutex_enter)
+
+/*
+ * mutex_exit(): the compare-and-set need only be atomic with respect
+ * to interrupts. the cheapest way to achieve that may be to use a
+ * restartable sequence, but the code to do that would be quite involved,
+ * so just use ldxr+stxr to achieve the same.
+ */
+ENTRY(mutex_exit)
+ dmb sy /* membar_exit() */
+ mrs x1, tpidr_el1 /* x1 = curlwp */
+1:
+ ldxr x2, [x0] /* load old value */
+ cmp x1, x2 /* equals curlwp? */
+ b.ne 3f /* slow path if different */
+ stxr w3, xzr, [x0] /* store zero as new value */
+ cbnz w3, 2f /* succeed? nope, try again. */
+ ret
+2:
+ b 1b
+3:
+ b _C_LABEL(mutex_vector_exit)
+END(mutex_exit)
+#endif /* !LOCKDEBUG */
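[Editorial note: the exit stub above is the mirror image of the enter stub: a barrier first, then a compare-and-set of curlwp back to zero, with any mismatch punted to mutex_vector_exit(). A rough, hypothetical C rendering in the same style as the enter sketch above:]

#include <stdint.h>

struct lwp;
extern struct lwp *sketch_curlwp(void);      /* hypothetical, as above */
extern void mutex_vector_exit(uintptr_t *);  /* MI slow path */

static void
mutex_exit_sketch(uintptr_t *owner)
{
	uintptr_t me = (uintptr_t)sketch_curlwp();

	/*
	 * Compare-and-set curlwp -> 0.  The release ordering plays the
	 * role of the "dmb sy" the stub issues before its ldxr/stxr
	 * loop; if the owner word does not hold curlwp, fall through
	 * to the slow path.
	 */
	if (!__atomic_compare_exchange_n(owner, &me, (uintptr_t)0, 0,
	    __ATOMIC_RELEASE, __ATOMIC_RELAXED))
		mutex_vector_exit(owner);    /* not owner, or waiters: slow path */
}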
diff -r 640b7a5ea62e -r 117e4c271a7c sys/arch/aarch64/conf/files.aarch64
--- a/sys/arch/aarch64/conf/files.aarch64 Wed Aug 12 13:19:35 2020 +0000
+++ b/sys/arch/aarch64/conf/files.aarch64 Wed Aug 12 13:28:46 2020 +0000
@@ -1,4 +1,4 @@
-# $NetBSD: files.aarch64,v 1.26 2020/07/25 22:51:57 riastradh Exp $
+# $NetBSD: files.aarch64,v 1.27 2020/08/12 13:28:46 skrll Exp $
defflag opt_cpuoptions.h AARCH64_ALIGNMENT_CHECK
defflag opt_cpuoptions.h AARCH64_EL0_STACK_ALIGNMENT_CHECK
@@ -100,6 +100,7 @@
file arch/aarch64/aarch64/fusu.S
file arch/aarch64/aarch64/idle_machdep.S
file arch/aarch64/aarch64/kobj_machdep.c modular
+file arch/aarch64/aarch64/lock_stubs.S
file arch/aarch64/aarch64/process_machdep.c
file arch/aarch64/aarch64/procfs_machdep.c procfs
file arch/aarch64/aarch64/sig_machdep.c
diff -r 640b7a5ea62e -r 117e4c271a7c sys/arch/aarch64/include/mutex.h
--- a/sys/arch/aarch64/include/mutex.h Wed Aug 12 13:19:35 2020 +0000
+++ b/sys/arch/aarch64/include/mutex.h Wed Aug 12 13:28:46 2020 +0000
@@ -1,3 +1,5 @@
-/* $NetBSD: mutex.h,v 1.1 2014/08/10 05:47:38 matt Exp $ */
+/* $NetBSD: mutex.h,v 1.2 2020/08/12 13:28:46 skrll Exp $ */
#include <arm/mutex.h>
+
+#define __HAVE_MUTEX_STUBS 1
diff -r 640b7a5ea62e -r 117e4c271a7c sys/arch/evbarm/include/mutex.h
--- a/sys/arch/evbarm/include/mutex.h Wed Aug 12 13:19:35 2020 +0000
+++ b/sys/arch/evbarm/include/mutex.h Wed Aug 12 13:28:46 2020 +0000
@@ -1,3 +1,7 @@
-/* $NetBSD: mutex.h,v 1.2 2007/02/09 21:55:03 ad Exp $ */
+/* $NetBSD: mutex.h,v 1.3 2020/08/12 13:28:46 skrll Exp $ */
+#ifdef __aarch64__
+#include <aarch64/mutex.h>
+#else
#include <arm/mutex.h>
+#endif
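[Editorial note: defining __HAVE_MUTEX_STUBS is what tells the machine-independent mutex code that the port supplies assembly entry points; the evbarm header change routes aarch64 kernels to <aarch64/mutex.h> so they pick the definition up. Schematically, the kern_mutex.c side looks something like the following (an assumed, simplified shape, not the literal source):]

/*
 * kern_mutex.c, schematic only: compile the C entry points only
 * when the port supplies no assembly stubs.
 */
#ifndef __HAVE_MUTEX_STUBS
void
mutex_enter(kmutex_t *mtx)
{
	mutex_vector_enter(mtx);     /* C path does all the work */
}

void
mutex_exit(kmutex_t *mtx)
{
	mutex_vector_exit(mtx);
}
#endif	/* !__HAVE_MUTEX_STUBS */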