author     Roland McGrath <roland@gnu.org>  2003-03-25 22:40:21 +0000
committer  Roland McGrath <roland@gnu.org>  2003-03-25 22:40:21 +0000
commit     3e195d9371956446c8182af812723300010f0bb8
tree       90c76aae8c7a804f8533385a7cc04b70649b4de2
parent     bacb02966f9e992d923ea988bb11c51ab297b10c
2003-03-25 Roland McGrath <roland@redhat.com>
	* sysdeps/powerpc/bits/atomic.h (__arch_atomic_exchange_32): New macro.
	(__arch_atomic_exchange_64): New macro.
	(atomic_exchange): Use them.
	(__arch_atomic_exchange_and_add_32): New macro.
	(__arch_atomic_exchange_and_add_64): New macro.
	(atomic_exchange_and_add): Use them.
	Original patch from Steven Munroe <sjmunroe@us.ibm.com>.
-rw-r--r--  sysdeps/powerpc/bits/atomic.h | 99
1 file changed, 75 insertions, 24 deletions
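
The hunks below split the old 32-bit-only bodies of atomic_exchange and
atomic_exchange_and_add into per-width helpers (__arch_atomic_exchange_32/_64,
__arch_atomic_exchange_and_add_32/_64) plus size-dispatching wrappers, so
8-byte objects on powerpc64 no longer reach the unconditional abort ().  As a
rough, illustrative guide to the semantics the lwarx/stwcx. and ldarx/stdcx.
loops implement (not part of the patch; the sketch_* names are made up), the
same results can be written with GCC's generic __atomic builtins:

#include <stdint.h>

/* Sketch of what __arch_atomic_exchange_32 computes: store the new value,
   return the old one.  The glibc macro issues __ARCH_REL_INSTR (a release
   barrier) before the loop, so release ordering is the closest generic
   equivalent.  */
static inline uint32_t
sketch_atomic_exchange_32 (uint32_t *mem, uint32_t value)
{
  return __atomic_exchange_n (mem, value, __ATOMIC_RELEASE);
}

/* Sketch of what __arch_atomic_exchange_and_add_32 computes: add value to
   *mem and return the value *mem held before the addition.  The glibc macro
   emits no barrier, so relaxed ordering is assumed here.  */
static inline uint32_t
sketch_atomic_exchange_and_add_32 (uint32_t *mem, uint32_t value)
{
  return __atomic_fetch_add (mem, value, __ATOMIC_RELAXED);
}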
diff --git a/sysdeps/powerpc/bits/atomic.h b/sysdeps/powerpc/bits/atomic.h
index 49f1c14b9d..e0f2bd4881 100644
--- a/sysdeps/powerpc/bits/atomic.h
+++ b/sysdeps/powerpc/bits/atomic.h
@@ -102,38 +102,90 @@ typedef uintmax_t uatomic_max_t;
   __tmp != 0;								      \
 })
 
+# define __arch_atomic_exchange_64(mem, value)				      \
+    ({									      \
+      __typeof (*mem) __val;						      \
+      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+			"1:	ldarx	%0,0,%2\n"			      \
+			"	stdcx.	%3,0,%2\n"			      \
+			"	bne-	1b"				      \
+			: "=&r" (__val), "=m" (*mem)			      \
+			: "r" (mem), "r" (value), "1" (*mem)		      \
+			: "cr0");					      \
+      __val;								      \
+    })
+
+# define __arch_atomic_exchange_and_add_64(mem, value)			      \
+    ({									      \
+      __typeof (*mem) __val, __tmp;					      \
+      __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
+			"	addi	%1,%0,%4\n"			      \
+			"	stdcx.	%1,0,%3\n"			      \
+			"	bne-	1b"				      \
+			: "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
+			: "r" (mem), "I" (value), "2" (*mem)		      \
+			: "cr0");					      \
+      __val;								      \
+    })
+
 #else /* powerpc32 */
 # define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
   (abort (), 0)
+
+# define __arch_atomic_exchange_64(mem, value) \
+    ({ abort (); (*mem) = (value); })
+# define __arch_atomic_exchange_and_add_64(mem, value) \
+    ({ abort (); (*mem) = (value); })
 #endif
 
+#define __arch_atomic_exchange_32(mem, value)				      \
+  ({									      \
+    __typeof (*mem) __val;						      \
+    __asm __volatile (__ARCH_REL_INSTR "\n"				      \
+		      "1:	lwarx	%0,0,%2\n"			      \
+		      "	stwcx.	%3,0,%2\n"				      \
+		      "	bne-	1b"					      \
+		      : "=&r" (__val), "=m" (*mem)			      \
+		      : "r" (mem), "r" (value), "1" (*mem)		      \
+		      : "cr0");						      \
+    __val;								      \
+  })
 
-#define atomic_exchange(mem, value) \
-  ({ if (sizeof (*mem) != 4)						      \
-       abort ();							      \
-     int __val;								      \
-      __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	lwarx	%0,0,%2\n"			      \
-			"	stwcx.	%3,0,%2\n"			      \
-			"	bne-	1b"				      \
-			: "=&r" (__val), "=m" (*mem)			      \
-			: "r" (mem), "r" (value), "1" (*mem)		      \
-			: "cr0");					      \
-      __val; })
+#define __arch_atomic_exchange_and_add_32(mem, value)			      \
+  ({									      \
+    __typeof (*mem) __val, __tmp;					      \
+    __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
+		      "	addi	%1,%0,%4\n"				      \
+		      "	stwcx.	%1,0,%3\n"				      \
+		      "	bne-	1b"					      \
+		      : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
+		      : "r" (mem), "I" (value), "2" (*mem)		      \
+		      : "cr0");						      \
+    __val;								      \
+  })
 
+#define atomic_exchange(mem, value)					      \
+  ({									      \
+    __typeof (*(mem)) __result;						      \
+    if (sizeof (*mem) == 4)						      \
+      __result = __arch_atomic_exchange_32 ((mem), (value));		      \
+    else if (sizeof (*mem) == 8)					      \
+      __result = __arch_atomic_exchange_64 ((mem), (value));		      \
+    else 								      \
+       abort ();							      \
+    __result;								      \
+  })
 
-#define atomic_exchange_and_add(mem, value) \
-  ({ if (sizeof (*mem) != 4)						      \
+#define atomic_exchange_and_add(mem, value)				      \
+  ({									      \
+    __typeof (*(mem)) __result;						      \
+    if (sizeof (*mem) == 4)						      \
+      __result = __arch_atomic_exchange_and_add_32 ((mem), (value));	      \
+    else if (sizeof (*mem) == 8)					      \
+      __result = __arch_atomic_exchange_and_add_64 ((mem), (value));	      \
+    else 								      \
        abort ();							      \
-     int __val, __tmp;							      \
-     __asm __volatile ("1:	lwarx	%0,0,%3\n"			      \
-		       "	addi	%1,%0,%4\n"			      \
-		       "	stwcx.	%1,0,%3\n"			      \
-		       "	bne-	1b"				      \
-		       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem)	      \
-		       : "r" (mem), "I" (value), "2" (*mem)		      \
-		       : "cr0");					      \
-     __val;								      \
+    __result;								      \
   })
 
 
@@ -156,7 +208,6 @@ typedef uintmax_t uatomic_max_t;
   })
 
 
-
 #define atomic_full_barrier()	__asm ("sync" ::: "memory")
 #ifdef __powerpc64__
 # define atomic_read_barrier()	__asm ("lwsync" ::: "memory")
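
For context only, a hypothetical glibc-internal caller of the dispatching
macro might look like the sketch below (pending and enqueue_one are made-up
names); after this change the same call also works for an 8-byte counter on
powerpc64 instead of aborting at run time:

#include <atomic.h>  /* glibc-internal wrapper that pulls in bits/atomic.h */

static unsigned long int pending;  /* 8 bytes on powerpc64, 4 on powerpc32 */

/* Record one more pending request and report how many were already queued;
   atomic_exchange_and_add returns the previous value of *mem.  */
static unsigned long int
enqueue_one (void)
{
  return atomic_exchange_and_add (&pending, 1);
}

The lwarx/stwcx. (and ldarx/stdcx.) pair acts as a load-reserve /
store-conditional: the store succeeds only if no other processor has written
the location since the reservation was taken, and bne- retries the loop
otherwise, so the observable effect matches an atomic fetch-and-add.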