Module Name:    src
Committed By:   martin
Date:           Fri Apr 30 13:54:00 UTC 2021

Modified Files:
        src/common/lib/libc/arch/arm/atomic [netbsd-9]: atomic_swap.S
            atomic_swap_16.S atomic_swap_64.S membar_ops.S
        src/sys/arch/arm/include [netbsd-9]: lock.h

Log Message:
Pull up following revision(s) (requested by skrll in ticket #1261):

        sys/arch/arm/include/lock.h: revision 1.38
        common/lib/libc/arch/arm/atomic/membar_ops.S: revision 1.7
        common/lib/libc/arch/arm/atomic/atomic_swap_16.S: revision 1.5
        common/lib/libc/arch/arm/atomic/atomic_swap_64.S: revision 1.12
        common/lib/libc/arch/arm/atomic/atomic_swap.S: revision 1.17

Add the appropriate memory barrier before the lock is cleared in
__sync_lock_release_{1,2,4,8}.  That is, all reads and writes for the
inner shareability domain are ordered before the lock-clear store.
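
In C terms, the __sync_lock_release change amounts to the following (a
minimal sketch, assuming ARMv7; the committed code is assembly, and the
function name here is illustrative, not NetBSD API):

/*
 * Sketch of the barrier-then-store sequence the diff adds.  On ARMv7,
 * "dmb ishst" orders earlier stores in the inner shareability domain
 * before the store that clears the lock.
 */
static inline void
sync_lock_release_sketch(volatile unsigned int *lock)
{
	__asm __volatile("dmb ishst" ::: "memory");
	*lock = 0;			/* lock-clear store */
}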

Improve the membar_ops barriers - no need to use dsb and wait for
completion.  Also, we only need to act on the inner shareability domain.
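
The distinction is roughly the following (a sketch, assuming ARMv7;
the function names are illustrative).  dsb stalls the CPU until all
prior accesses have completed, while dmb only enforces ordering, which
is all the membar_ops contract requires; the "ish" qualifier restricts
the barrier to the inner shareability domain, i.e. the coherent CPUs,
rather than the whole system:

static inline void
membar_producer_sketch(void)
{
	__asm __volatile("dmb ishst" ::: "memory");	/* order prior stores */
}

static inline void
membar_sync_sketch(void)
{
	__asm __volatile("dmb ish" ::: "memory");	/* order all accesses */
}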

Fix the barrier confusion.  From Riastradh - thanks!


To generate a diff of this commit:
cvs rdiff -u -r1.14.18.1 -r1.14.18.2 \
    src/common/lib/libc/arch/arm/atomic/atomic_swap.S
cvs rdiff -u -r1.4 -r1.4.18.1 \
    src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S
cvs rdiff -u -r1.10.18.1 -r1.10.18.2 \
    src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S
cvs rdiff -u -r1.6 -r1.6.28.1 \
    src/common/lib/libc/arch/arm/atomic/membar_ops.S
cvs rdiff -u -r1.33.8.1 -r1.33.8.2 src/sys/arch/arm/include/lock.h

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/common/lib/libc/arch/arm/atomic/atomic_swap.S
diff -u src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.14.18.1 src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.14.18.2
--- src/common/lib/libc/arch/arm/atomic/atomic_swap.S:1.14.18.1	Mon Apr 26 18:34:28 2021
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap.S	Fri Apr 30 13:54:00 2021
@@ -1,4 +1,4 @@
-/*	$NetBSD: atomic_swap.S,v 1.14.18.1 2021/04/26 18:34:28 martin Exp $	*/
+/*	$NetBSD: atomic_swap.S,v 1.14.18.2 2021/04/30 13:54:00 martin Exp $	*/
 
 /*-
  * Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
@@ -88,7 +88,7 @@ STRONG_ALIAS(_atomic_swap_ptr,_atomic_sw
 ENTRY_NP(__sync_lock_release_4)
 	mov	r1, #0
 #ifdef _ARM_ARCH_7
-	dmb
+	dmb	ishst
 #else
 	mcr	p15, 0, r1, c7, c10, 5	/* data memory barrier */
 #endif
@@ -129,7 +129,7 @@ STRONG_ALIAS(_atomic_swap_uchar,_atomic_
 ENTRY_NP(__sync_lock_release_1)
 	mov	r1, #0
 #ifdef _ARM_ARCH_7
-	dmb
+	dmb	ishst
 #else
 	mcr	p15, 0, r1, c7, c10, 5	/* data memory barrier */
 #endif

Index: src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S
diff -u src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S:1.4 src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S:1.4.18.1
--- src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S:1.4	Sun May 17 20:57:11 2015
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S	Fri Apr 30 13:54:00 2021
@@ -1,4 +1,4 @@
-/*	$NetBSD: atomic_swap_16.S,v 1.4 2015/05/17 20:57:11 justin Exp $ */
+/*	$NetBSD: atomic_swap_16.S,v 1.4.18.1 2021/04/30 13:54:00 martin Exp $ */
 
 /*-
  * Copyright (c) 2013 The NetBSD Foundation, Inc.
@@ -58,6 +58,11 @@ STRONG_ALIAS(_atomic_swap_ushort,_atomic
 #if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
 ENTRY_NP(__sync_lock_release_2)
 	mov	r1, #0
+#ifdef _ARM_ARCH_7
+	dmb	ishst
+#else
+	mcr	p15, 0, r1, c7, c10, 5	/* data memory barrier */
+#endif
 	strh	r1, [r0]
 	RET
 END(__sync_lock_release_2)

Index: src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S
diff -u src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S:1.10.18.1 src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S:1.10.18.2
--- src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S:1.10.18.1	Mon Apr 26 18:34:28 2021
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S	Fri Apr 30 13:54:00 2021
@@ -1,4 +1,4 @@
-/*	$NetBSD: atomic_swap_64.S,v 1.10.18.1 2021/04/26 18:34:28 martin Exp $	*/
+/*	$NetBSD: atomic_swap_64.S,v 1.10.18.2 2021/04/30 13:54:00 martin Exp $	*/
 /*-
  * Copyright (c) 2012 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -57,6 +57,11 @@ CRT_ALIAS(__atomic_exchange_8,_atomic_sw
 ENTRY_NP(__sync_lock_release_8)
 	mov	r2, #0
 	mov	r3, #0
+#ifdef _ARM_ARCH_7
+	dmb	ishst
+#else
+	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
+#endif
 	strd	r2, r3, [r0]
 	RET
 END(__sync_lock_release_8)

Index: src/common/lib/libc/arch/arm/atomic/membar_ops.S
diff -u src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.6 src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.6.28.1
--- src/common/lib/libc/arch/arm/atomic/membar_ops.S:1.6	Fri Mar 28 21:32:41 2014
+++ src/common/lib/libc/arch/arm/atomic/membar_ops.S	Fri Apr 30 13:54:00 2021
@@ -1,4 +1,4 @@
-/*	$NetBSD: membar_ops.S,v 1.6 2014/03/28 21:32:41 skrll Exp $	*/
+/*	$NetBSD: membar_ops.S,v 1.6.28.1 2021/04/30 13:54:00 martin Exp $	*/
 /*-
  * Copyright (c) 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
@@ -34,10 +34,10 @@
 
 ENTRY_NP(_membar_producer)
 #ifdef _ARM_ARCH_7
-	dsb
+	dmb	ishst
 #else
 	mov	r0, #0
-	mcr	p15, 0, r0, c7, c10, 4	 /* Data Synchronization Barrier */
+	mcr	p15, 0, r0, c7, c10, 5	/* Data Memory Barrier */
 #endif
 	RET
 END(_membar_producer)
@@ -47,7 +47,7 @@ STRONG_ALIAS(_membar_write,_membar_produ
 
 ENTRY_NP(_membar_sync)
 #ifdef _ARM_ARCH_7
-	dmb
+	dmb	ish
 #else
 	mov	r0, #0
 	mcr	p15, 0, r0, c7, c10, 5	/* Data Memory Barrier */

Index: src/sys/arch/arm/include/lock.h
diff -u src/sys/arch/arm/include/lock.h:1.33.8.1 src/sys/arch/arm/include/lock.h:1.33.8.2
--- src/sys/arch/arm/include/lock.h:1.33.8.1	Mon Apr 26 18:34:28 2021
+++ src/sys/arch/arm/include/lock.h	Fri Apr 30 13:54:00 2021
@@ -1,4 +1,4 @@
-/*	$NetBSD: lock.h,v 1.33.8.1 2021/04/26 18:34:28 martin Exp $	*/
+/*	$NetBSD: lock.h,v 1.33.8.2 2021/04/30 13:54:00 martin Exp $	*/
 
 /*-
  * Copyright (c) 2000, 2001 The NetBSD Foundation, Inc.
@@ -139,32 +139,34 @@ __swp(int __val, __cpu_simple_lock_t *__
 }
 #endif /* !_ARM_ARCH_6 */
 
+/* load/dmb implies load-acquire */
 static __inline void
-__arm_membar_producer(void)
+__arm_load_dmb(void)
 {
 #if defined(_ARM_ARCH_7)
-	__asm __volatile("dsb" ::: "memory");
+	__asm __volatile("dmb ish" ::: "memory");
 #elif defined(_ARM_ARCH_6)
-	__asm __volatile("mcr\tp15,0,%0,c7,c10,4" :: "r"(0) : "memory");
+	__asm __volatile("mcr\tp15,0,%0,c7,c10,5" :: "r"(0) : "memory");
 #endif
 }
 
+/* dmb/store implies store-release */
 static __inline void
-__arm_membar_consumer(void)
+__arm_dmb_store(void)
 {
 #if defined(_ARM_ARCH_7)
-	__asm __volatile("dmb" ::: "memory");
+	__asm __volatile("dmb ish" ::: "memory");
 #elif defined(_ARM_ARCH_6)
 	__asm __volatile("mcr\tp15,0,%0,c7,c10,5" :: "r"(0) : "memory");
 #endif
 }
 
+
 static __inline void __unused
 __cpu_simple_lock_init(__cpu_simple_lock_t *__alp)
 {
 
 	*__alp = __SIMPLELOCK_UNLOCKED;
-	__arm_membar_producer();
 }
 
 #if !defined(__thumb__) || defined(_ARM_ARCH_T2)
@@ -172,12 +174,11 @@ static __inline void __unused
 __cpu_simple_lock(__cpu_simple_lock_t *__alp)
 {
 #if defined(_ARM_ARCH_6)
-	__arm_membar_consumer();
 	do {
 		/* spin */
 	} while (__arm_load_exclusive(__alp) != __SIMPLELOCK_UNLOCKED
 		 || __arm_store_exclusive(__alp, __SIMPLELOCK_LOCKED));
-	__arm_membar_producer();
+	__arm_load_dmb();
 #else
 	while (__swp(__SIMPLELOCK_LOCKED, __alp) != __SIMPLELOCK_UNLOCKED)
 		continue;
@@ -192,13 +193,12 @@ static __inline int __unused
 __cpu_simple_lock_try(__cpu_simple_lock_t *__alp)
 {
 #if defined(_ARM_ARCH_6)
-	__arm_membar_consumer();
 	do {
 		if (__arm_load_exclusive(__alp) != __SIMPLELOCK_UNLOCKED) {
 			return 0;
 		}
 	} while (__arm_store_exclusive(__alp, __SIMPLELOCK_LOCKED));
-	__arm_membar_producer();
+	__arm_load_dmb();
 	return 1;
 #else
 	return (__swp(__SIMPLELOCK_LOCKED, __alp) == __SIMPLELOCK_UNLOCKED);
@@ -221,9 +221,8 @@ __cpu_simple_unlock(__cpu_simple_lock_t 
 		    :: "r"(__SIMPLELOCK_UNLOCKED), "r"(__alp) : "memory");
 	}
 #else
-	__arm_membar_consumer();
+	__arm_dmb_store();
 	*__alp = __SIMPLELOCK_UNLOCKED;
-	__arm_membar_producer();
 #endif
 }
 

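For context, the lock.h change above follows the standard mapping of
dmb onto acquire/release ordering, noted in the new comments ("load/dmb
implies load-acquire", "dmb/store implies store-release").  A minimal C
sketch of that recipe, assuming ARMv7 and GCC's __atomic builtins (the
names are illustrative, not the committed code):

/*
 * Acquire: the swap that takes the lock, then a dmb, so no later
 * access can be reordered before the lock acquisition.
 */
static inline void
spin_lock_sketch(volatile unsigned int *lock)
{
	while (__atomic_exchange_n(lock, 1, __ATOMIC_RELAXED) != 0)
		continue;		/* spin */
	__asm __volatile("dmb ish" ::: "memory");	/* load/dmb: acquire */
}

/*
 * Release: a dmb, then the store that drops the lock, so all earlier
 * accesses are ordered before the lock-clear store.
 */
static inline void
spin_unlock_sketch(volatile unsigned int *lock)
{
	__asm __volatile("dmb ish" ::: "memory");	/* dmb/store: release */
	*lock = 0;
}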