author     Ulrich Drepper <drepper@redhat.com>  2003-05-13 21:14:28 +0000
committer  Ulrich Drepper <drepper@redhat.com>  2003-05-13 21:14:28 +0000
commit     7158eae4a8c8f15261e50ecab8929050df0a0435 (patch)
tree       c6edb8b3be4c23ce0cb1984e32c5b7fecf51db74 /sysdeps/powerpc/powerpc64/bits
parent     edf205d5ef1528247fde0637572a4631e0f1a5c2 (diff)
Update.
2003-05-12  Steven Munroe  <sjmunroe@us.ibm.com>

	* sysdeps/powerpc/bits/atomic.h
	(__arch_compare_and_exchange_bool_8_rel): Define.
	(__arch_compare_and_exchange_bool_16_rel): Define.
	(__ARCH_REL_INSTR): Define if not already defined.
	(__arch_atomic_exchange_and_add_32): Add "memory" to clobber list.
	(__arch_atomic_decrement_if_positive_32):
	Add "memory" to clobber list.
	(__arch_compare_and_exchange_val_32_acq): Remove release sync.
	(__arch_compare_and_exchange_val_32_rel): Define.
	(__arch_atomic_exchange_32): Remove.
	(__arch_atomic_exchange_32_acq): Define.
	(__arch_atomic_exchange_32_rel): Define.
	(atomic_compare_and_exchange_val_rel): Define.
	(atomic_exchange_acq): Use __arch_atomic_exchange_*_acq forms.
	(atomic_exchange_rel): Define.
	* sysdeps/powerpc/powerpc32/bits/atomic.h
	(__arch_compare_and_exchange_bool_32_acq): Remove release sync.
	(__arch_compare_and_exchange_bool_32_rel): Define.
	(__arch_compare_and_exchange_bool_64_rel): Define.
	(__arch_compare_and_exchange_val_64_rel): Define.
	(__arch_atomic_exchange_64): Remove.
	(__arch_atomic_exchange_64_acq): Define.
	(__arch_atomic_exchange_64_rel): Define.
	* sysdeps/powerpc/powerpc64/bits/atomic.h
	(__arch_compare_and_exchange_bool_32_rel): Define.
	(__arch_compare_and_exchange_bool_64_acq): Remove release sync.
	(__arch_compare_and_exchange_bool_64_rel): Define.
	(__arch_compare_and_exchange_val_64_acq): Remove release sync.
	(__arch_compare_and_exchange_val_64_rel): Define.
	(__arch_atomic_exchange_64): Remove.
	(__arch_atomic_exchange_64_acq): Define.
	(__arch_atomic_exchange_64_rel): Define.
	(__arch_atomic_exchange_and_add_64): Add "memory" to clobber list.
	(__arch_atomic_decrement_if_positive_64):
	Add "memory" to clobber list.
	[!UP](__ARCH_REL_INSTR): Define as lwsync.

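The split described above is between acquire and release ordering: the *_acq macros end with __ARCH_ACQ_INSTR (isync), so accesses after the atomic operation cannot be moved before it, while the new *_rel macros begin with __ARCH_REL_INSTR (lwsync on SMP powerpc64), so accesses before the atomic operation complete first.  A minimal sketch of how a caller might pair the two forms follows; spin_lock, spin_unlock and lock_t are illustrative names only, while atomic_compare_and_exchange_val_acq is the existing acquire macro and atomic_exchange_rel is the release form this patch adds.

	/* Illustrative only: a toy spin lock on top of the acquire/release
	   macro pair.  Not glibc API.  */
	typedef volatile int lock_t;

	static inline void
	spin_lock (lock_t *lock)
	{
	  /* Acquire: loads/stores in the critical section cannot be
	     hoisted above the successful compare-and-exchange.  */
	  while (atomic_compare_and_exchange_val_acq (lock, 1, 0) != 0)
	    ;
	}

	static inline void
	spin_unlock (lock_t *lock)
	{
	  /* Release: stores made while the lock was held are ordered
	     before the store that frees it.  */
	  atomic_exchange_rel (lock, 0);
	}

On powerpc, spin_unlock would expand to one of the __arch_atomic_exchange_*_rel sequences added by this patch: lwsync followed by the load-and-reserve/store-conditional loop.
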
Diffstat (limited to 'sysdeps/powerpc/powerpc64/bits')
-rw-r--r--  sysdeps/powerpc/powerpc64/bits/atomic.h  84
1 file changed, 77 insertions, 7 deletions
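
The last ChangeLog item ([!UP] __ARCH_REL_INSTR defined as lwsync) works together with the common powerpc header, which per the first entry above only defines __ARCH_REL_INSTR "if not already defined".  A hedged sketch of how the two layers might resolve on an SMP build; the "sync" fallback attributed to the common header is an assumption and is not part of this diff:

	/* sysdeps/powerpc/powerpc64/bits/atomic.h (this file): all 64-bit
	   processors have the cheaper lwsync, so use it for release.  */
	# ifndef UP
	#  define __ARCH_REL_INSTR	"lwsync"
	# endif

	/* sysdeps/powerpc/bits/atomic.h (common header, included at the
	   bottom of this file): define a fallback only if the processor
	   specific header did not already pick one.  The "sync" fallback
	   shown here is assumed, not visible in this diff.  */
	# ifndef __ARCH_REL_INSTR
	#  define __ARCH_REL_INSTR	"sync"
	# endif
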
diff --git a/sysdeps/powerpc/powerpc64/bits/atomic.h b/sysdeps/powerpc/powerpc64/bits/atomic.h
index 938e5ee3f0..76be01edf2 100644
--- a/sysdeps/powerpc/powerpc64/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc64/bits/atomic.h
@@ -30,7 +30,7 @@
 # define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
 ({									      \
   unsigned int __tmp;							      \
-  __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+  __asm __volatile (							      \
 		    "1:	lwarx	%0,0,%1\n"				      \
 		    "	extsw	%0,%0\n"				      \
 		    "	subf.	%0,%2,%0\n"				      \
@@ -44,6 +44,23 @@
   __tmp != 0;								      \
 })
 
+# define __arch_compare_and_exchange_bool_32_rel(mem, newval, oldval) \
+({									      \
+  unsigned int __tmp;							      \
+  __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+		    "1:	lwarx	%0,0,%1\n"				      \
+		    "	extsw	%0,%0\n"				      \
+		    "	subf.	%0,%2,%0\n"				      \
+		    "	bne	2f\n"					      \
+		    "	stwcx.	%3,0,%1\n"				      \
+		    "	bne-	1b\n"					      \
+		    "2:	"						      \
+		    : "=&r" (__tmp)					      \
+		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "cr0", "memory");					      \
+  __tmp != 0;								      \
+})
+
 /* 
  * Only powerpc64 processors support Load doubleword and reserve index (ldarx) 
  * and Store doubleword conditional indexed (stdcx) instructions.  So here
@@ -52,7 +69,7 @@
 # define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
 ({									      \
   unsigned long	__tmp;							      \
-  __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+  __asm __volatile (							      \
 		    "1:	ldarx	%0,0,%1\n"				      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
@@ -65,11 +82,27 @@
   __tmp != 0;								      \
 })
 
+# define __arch_compare_and_exchange_bool_64_rel(mem, newval, oldval) \
+({									      \
+  unsigned long	__tmp;							      \
+  __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+		    "1:	ldarx	%0,0,%1\n"				      \
+		    "	subf.	%0,%2,%0\n"				      \
+		    "	bne	2f\n"					      \
+		    "	stdcx.	%3,0,%1\n"				      \
+		    "	bne-	1b\n"					      \
+		    "2:	"						      \
+		    : "=&r" (__tmp)					      \
+		    : "b" (mem), "r" (oldval), "r" (newval)		      \
+		    : "cr0", "memory");					      \
+  __tmp != 0;								      \
+})
+
 #define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
   ({									      \
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
-      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+      __asm __volatile (						      \
 		        "1:	ldarx	%0,0,%1\n"			      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
@@ -82,7 +115,38 @@
       __tmp;								      \
   })
 
-# define __arch_atomic_exchange_64(mem, value) \
+#define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
+  ({									      \
+      __typeof (*(mem)) __tmp;						      \
+      __typeof (mem)  __memp = (mem);					      \
+      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+		        "1:	ldarx	%0,0,%1\n"			      \
+		        "	cmpd	%0,%2\n"			      \
+		        "	bne	2f\n"				      \
+		        "	stdcx.	%3,0,%1\n"			      \
+		        "	bne-	1b\n"				      \
+		        "2:	"					      \
+		        : "=&r" (__tmp)					      \
+		        : "b" (__memp), "r" (oldval), "r" (newval)	      \
+		        : "cr0", "memory");				      \
+      __tmp;								      \
+  })
+
+# define __arch_atomic_exchange_64_acq(mem, value) \
+    ({									      \
+      __typeof (*mem) __val;						      \
+      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+			"1:	ldarx	%0,0,%2\n"			      \
+			"	stdcx.	%3,0,%2\n"			      \
+			"	bne-	1b\n"				      \
+		  " " __ARCH_ACQ_INSTR					      \
+			: "=&r" (__val), "=m" (*mem)			      \
+			: "b" (mem), "r" (value), "1" (*mem)		      \
+			: "cr0", "memory");				      \
+      __val;								      \
+    })
+
+# define __arch_atomic_exchange_64_rel(mem, value) \
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
@@ -91,7 +155,7 @@
 			"	bne-	1b"				      \
 			: "=&r" (__val), "=m" (*mem)			      \
 			: "b" (mem), "r" (value), "1" (*mem)		      \
-			: "cr0");					      \
+			: "cr0", "memory");				      \
       __val;								      \
     })
 
@@ -104,7 +168,7 @@
 			"	bne-	1b"				      \
 			: "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
 			: "b" (mem), "r" (value), "2" (*mem)		      \
-			: "cr0");					      \
+			: "cr0", "memory");				      \
       __val;								      \
     })
 
@@ -119,7 +183,7 @@
 		       "2:	" __ARCH_ACQ_INSTR			      \
 		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
 		       : "b" (mem), "2" (*mem)				      \
-		       : "cr0");					      \
+		       : "cr0", "memory");				      \
      __val;								      \
   })
 
@@ -127,6 +191,12 @@
  * All powerpc64 processors support the new "light weight"  sync (lwsync).   
  */
 # define atomic_read_barrier()	__asm ("lwsync" ::: "memory")
+/* 
+ * "light weight" sync can also be used for the release barrier.   
+ */
+# ifndef UP
+#  define __ARCH_REL_INSTR	"lwsync"
+# endif
 
 /*
  * Include the rest of the atomic ops macros which are common to both
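
Taken together, the pattern the new macros settle on is: a release-ordered operation issues __ARCH_REL_INSTR (lwsync) before the load-and-reserve/store-conditional loop, an acquire-ordered operation issues __ARCH_ACQ_INSTR (isync) after it, and the "memory" clobber added throughout keeps GCC from carrying cached copies of shared variables across the asm.  Below is a stand-alone rendition of the release exchange, assuming GCC on SMP powerpc64; the function name exchange_64_release is made up, and the code only mirrors __arch_atomic_exchange_64_rel above to show barrier and clobber placement, not to replace the macro.

	static inline unsigned long
	exchange_64_release (unsigned long *mem, unsigned long value)
	{
	  unsigned long old;
	  __asm__ __volatile__ ("lwsync\n"             /* release barrier first      */
	                        "1: ldarx  %0,0,%2\n"  /* load and reserve old value */
	                        "   stdcx. %3,0,%2\n"  /* try to store new value     */
	                        "   bne-   1b"         /* reservation lost: retry    */
	                        : "=&r" (old), "=m" (*mem)
	                        : "b" (mem), "r" (value), "1" (*mem)
	                        : "cr0", "memory");
	  /* cr0 is set by stdcx.; the "memory" clobber is the compiler-level
	     barrier that the ChangeLog adds to the existing macros as well.  */
	  return old;
	}

For comparison, __arch_atomic_exchange_64_acq in the hunk above keeps a leading barrier and additionally appends __ARCH_ACQ_INSTR (isync) after the loop, so it orders in both directions.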