author    Ulrich Drepper <drepper@redhat.com>	2007-03-26 20:16:39 +0000
committer Ulrich Drepper <drepper@redhat.com>	2007-03-26 20:16:39 +0000
commit    fa6e3bc38aaf82c80759f5dbdc9ec0d60ce7ee51
tree      eb6cb888ca49ac0cb0c90745aa57ae7e2e20aa2b
parent    c7693af7ef36972aa266030da26211db21fc82fb
* sysdeps/powerpc/bits/atomic.h
	[!MUTEX_HINT_ACQ]: Define MUTEX_HINT_ACQ.
	[!MUTEX_HINT_REL]: Define MUTEX_HINT_REL.
	(__arch_compare_and_exchange_val_32_acq): Add MUTEX_HINT_ACQ to lwarx.
	(__arch_compare_and_exchange_val_32_rel): Add MUTEX_HINT_REL to lwarx.
	(__arch_atomic_exchange_val_32_acq): Add MUTEX_HINT_ACQ to lwarx.
	(__arch_atomic_exchange_rel_32_rel): Add MUTEX_HINT_REL to lwarx.
	* sysdeps/powerpc/powerpc32/bits/atomic.h [_ARCH_PWR6 || _ARCH_PWR6X]:
	Define MUTEX_HINT_ACQ as ",1" and MUTEX_HINT_REL as ",0".
	(__arch_compare_and_exchange_bool_32_acq): Add MUTEX_HINT_ACQ to lwarx.
	(__arch_compare_and_exchange_bool_32_rel): Add MUTEX_HINT_REL to lwarx.
	* sysdeps/powerpc/powerpc64/bits/atomic.h [_ARCH_PWR6 || _ARCH_PWR6X]:
	Define MUTEX_HINT_ACQ as ",1" and MUTEX_HINT_REL as ",0".
	(__arch_compare_and_exchange_bool_32_acq): Add MUTEX_HINT_ACQ to lwarx.
	(__arch_compare_and_exchange_bool_32_rel): Add MUTEX_HINT_REL to lwarx.
	(__arch_compare_and_exchange_bool_64_acq): Add MUTEX_HINT_ACQ to ldarx.
	(__arch_compare_and_exchange_bool_64_rel): Add MUTEX_HINT_REL to ldarx.
	(__arch_compare_and_exchange_val_64_acq): Add MUTEX_HINT_ACQ to ldarx.
	(__arch_compare_and_exchange_val_64_rel): Add MUTEX_HINT_REL to ldarx.
	(__arch_atomic_exchange_64_acq): Add MUTEX_HINT_ACQ to ldarx.
	(__arch_atomic_exchange_64_rel): Add MUTEX_HINT_REL to ldarx.

2007-03-20  Jakub Jelinek  <jakub@redhat.com>
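To make the hint mechanics concrete: POWER6 extends the lwarx/ldarx
mnemonics with an optional fourth operand, the EH ("exclusive access
hint") bit, and the MUTEX_HINT_* macros simply string-paste that operand
onto the instruction.  A minimal standalone sketch (not part of the
commit; the try_lock helper is hypothetical, while the macro definitions
mirror the patch below):

	#if defined _ARCH_PWR6 || defined _ARCH_PWR6X
	# define MUTEX_HINT_ACQ	",1"	/* acquiring: expect stores to the line */
	# define MUTEX_HINT_REL	",0"	/* releasing/plain atomic: expect none  */
	#else
	# define MUTEX_HINT_ACQ		/* pre-POWER6: 3-operand form only */
	# define MUTEX_HINT_REL
	#endif

	/* "lwarx %0,0,%2" MUTEX_HINT_ACQ pastes into lwarx %0,0,%2,1 on
	   POWER6 and into plain lwarx %0,0,%2 elsewhere.  */
	static inline int
	try_lock (int *lock)
	{
	  int old;
	  __asm__ __volatile__ ("1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"
				"	cmpwi	%0,0\n"
				"	bne	2f\n"
				"	stwcx.	%3,0,%2\n"
				"	bne-	1b\n"
				"2:	isync"
				: "=&r" (old), "+m" (*lock)
				: "r" (lock), "r" (1)
				: "cr0", "memory");
	  return old == 0;	/* nonzero iff we took the lock */
	}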
Diffstat (limited to 'sysdeps/powerpc/powerpc64')
-rw-r--r--	sysdeps/powerpc/powerpc64/bits/atomic.h	62
1 file changed, 39 insertions, 23 deletions
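For context: glibc code never calls the __arch_* macros directly; the
size-dispatching wrappers in include/atomic.h select them by operand
width.  A hedged sketch of a typical call site (lock_word and take_lock
are illustrative names, not glibc code):

	#include <atomic.h>

	static int lock_word;

	static void
	take_lock (void)
	{
	  /* For a 32-bit operand this reaches the
	     __arch_compare_and_exchange_bool_32_acq loop in the diff
	     below; on POWER6 its lwarx carries the EH=1 mutex hint.  */
	  while (atomic_compare_and_exchange_bool_acq (&lock_word, 1, 0))
	    ;	/* spin until the 0 -> 1 transition succeeds */
	}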
diff --git a/sysdeps/powerpc/powerpc64/bits/atomic.h b/sysdeps/powerpc/powerpc64/bits/atomic.h
index e46dc1e4d7..3465bb31b4 100644
--- a/sysdeps/powerpc/powerpc64/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc64/bits/atomic.h
@@ -1,5 +1,5 @@
 /* Atomic operations.  PowerPC64 version.
-   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2007 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
    Contributed by Paul Mackerras <paulus@au.ibm.com>, 2003.
 
@@ -18,17 +18,33 @@
    Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
    02111-1307 USA.  */
 
+/*  POWER6 adds a "Mutex Hint" to the Load and Reserve instruction.
+    This is a hint to the hardware, telling it whether or not to expect
+    additional updates adjacent to the lock word.  If we are acquiring
+    a Mutex, the hint should be true.  Otherwise we are releasing a
+    Mutex or doing a simple atomic operation.  In that case we don't
+    expect additional updates adjacent to the lock word after the
+    Store Conditional and the hint should be false.  */
+
+#if defined _ARCH_PWR6 || defined _ARCH_PWR6X
+# define MUTEX_HINT_ACQ	",1"
+# define MUTEX_HINT_REL	",0"
+#else
+# define MUTEX_HINT_ACQ
+# define MUTEX_HINT_REL
+#endif
+
 /* The 32-bit exchange_bool is different on powerpc64 because the subf
    does signed 64-bit arithmetic while the lwarx is a 32-bit unsigned
    (load word and zero (high 32) form) load.
    In powerpc64 register values are 64-bit by default, including oldval.
    The value in oldval has unknown sign extension; lwarx loads the 32-bit
    value as unsigned.  So we explicitly clear the high 32 bits in oldval.  */
-# define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
+#define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
 ({									      \
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile ("   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2\n"				      \
+		    "1:	lwarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	 	      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
 		    "	stwcx.	%4,0,%2\n"				      \
@@ -40,12 +56,12 @@
   __tmp != 0;								      \
 })
 
-# define __arch_compare_and_exchange_bool_32_rel(mem, newval, oldval) \
+#define __arch_compare_and_exchange_bool_32_rel(mem, newval, oldval) \
 ({									      \
   unsigned int __tmp, __tmp2;						      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
 		    "   clrldi  %1,%1,32\n"				      \
-		    "1:	lwarx	%0,0,%2\n"				      \
+		    "1:	lwarx	%0,0,%2" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%1,%0\n"				      \
 		    "	bne	2f\n"					      \
 		    "	stwcx.	%4,0,%2\n"				      \
@@ -62,11 +78,11 @@
  * and Store doubleword conditional indexed (stdcx) instructions.  So here
  * we define the 64-bit forms.
  */
-# define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
+#define __arch_compare_and_exchange_bool_64_acq(mem, newval, oldval) \
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (							      \
-		    "1:	ldarx	%0,0,%1\n"				      \
+		    "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
 		    "	stdcx.	%3,0,%1\n"				      \
@@ -78,11 +94,11 @@
   __tmp != 0;								      \
 })
 
-# define __arch_compare_and_exchange_bool_64_rel(mem, newval, oldval) \
+#define __arch_compare_and_exchange_bool_64_rel(mem, newval, oldval) \
 ({									      \
   unsigned long	__tmp;							      \
   __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		    "1:	ldarx	%0,0,%1\n"				      \
+		    "1:	ldarx	%0,0,%1" MUTEX_HINT_REL "\n"		      \
 		    "	subf.	%0,%2,%0\n"				      \
 		    "	bne	2f\n"					      \
 		    "	stdcx.	%3,0,%1\n"				      \
@@ -99,7 +115,7 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (						      \
-		        "1:	ldarx	%0,0,%1\n"			      \
+		        "1:	ldarx	%0,0,%1" MUTEX_HINT_ACQ "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
 		        "	stdcx.	%3,0,%1\n"			      \
@@ -116,7 +132,7 @@
       __typeof (*(mem)) __tmp;						      \
       __typeof (mem)  __memp = (mem);					      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-		        "1:	ldarx	%0,0,%1\n"			      \
+		        "1:	ldarx	%0,0,%1" MUTEX_HINT_REL "\n"	      \
 		        "	cmpd	%0,%2\n"			      \
 		        "	bne	2f\n"				      \
 		        "	stdcx.	%3,0,%1\n"			      \
@@ -128,11 +144,11 @@
       __tmp;								      \
   })
 
-# define __arch_atomic_exchange_64_acq(mem, value) \
+#define __arch_atomic_exchange_64_acq(mem, value) \
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2\n"			      \
+			"1:	ldarx	%0,0,%2" MUTEX_HINT_ACQ "\n"	      \
 			"	stdcx.	%3,0,%2\n"			      \
 			"	bne-	1b\n"				      \
 		  " " __ARCH_ACQ_INSTR					      \
@@ -142,11 +158,11 @@
       __val;								      \
     })
 
-# define __arch_atomic_exchange_64_rel(mem, value) \
+#define __arch_atomic_exchange_64_rel(mem, value) \
     ({									      \
       __typeof (*mem) __val;						      \
       __asm __volatile (__ARCH_REL_INSTR "\n"				      \
-			"1:	ldarx	%0,0,%2\n"			      \
+			"1:	ldarx	%0,0,%2" MUTEX_HINT_REL "\n"	      \
 			"	stdcx.	%3,0,%2\n"			      \
 			"	bne-	1b"				      \
 			: "=&r" (__val), "=m" (*mem)			      \
@@ -155,7 +171,7 @@
       __val;								      \
     })
 
-# define __arch_atomic_exchange_and_add_64(mem, value) \
+#define __arch_atomic_exchange_and_add_64(mem, value) \
     ({									      \
       __typeof (*mem) __val, __tmp;					      \
       __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
@@ -168,7 +184,7 @@
       __val;								      \
     })
 
-# define __arch_atomic_increment_val_64(mem) \
+#define __arch_atomic_increment_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
       __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
@@ -181,7 +197,7 @@
       __val;								      \
     })
 
-# define __arch_atomic_decrement_val_64(mem) \
+#define __arch_atomic_decrement_val_64(mem) \
     ({									      \
       __typeof (*(mem)) __val;						      \
       __asm __volatile ("1:	ldarx	%0,0,%2\n"			      \
@@ -194,7 +210,7 @@
       __val;								      \
     })
 
-# define __arch_atomic_decrement_if_positive_64(mem) \
+#define __arch_atomic_decrement_if_positive_64(mem) \
   ({ int __val, __tmp;							      \
      __asm __volatile ("1:	ldarx	%0,0,%3\n"			      \
 		       "	cmpdi	0,%0,0\n"			      \
@@ -212,13 +228,13 @@
 /*
  * All powerpc64 processors support the new "light weight"  sync (lwsync).
  */
-# define atomic_read_barrier()	__asm ("lwsync" ::: "memory")
+#define atomic_read_barrier()	__asm ("lwsync" ::: "memory")
 /*
  * "light weight" sync can also be used for the release barrier.
  */
-# ifndef UP
-#  define __ARCH_REL_INSTR	"lwsync"
-# endif
+#ifndef UP
+# define __ARCH_REL_INSTR	"lwsync"
+#endif
 
 /*
  * Include the rest of the atomic ops macros which are common to both
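
A closing note on the clrldi in the 32-bit exchange_bool above: the
64-bit subf compare only works once the high half of oldval is cleared.
A hedged arithmetic sketch (illustrative values only):

	#include <assert.h>

	int
	main (void)
	{
	  /* oldval may arrive sign-extended; lwarx zero-extends the word
	     it loads, so the same 32-bit value can differ in the high
	     half of the register.  */
	  unsigned long oldval = 0xffffffff80000000UL;	/* sign-extended */
	  unsigned long loaded = 0x0000000080000000UL;	/* as lwarx sees */

	  assert (loaded - oldval != 0);	/* 64-bit subf: false mismatch */

	  oldval &= 0xffffffffUL;		/* what clrldi %1,%1,32 does */
	  assert (loaded - oldval == 0);	/* equal words now compare equal */
	  return 0;
	}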