author     Andreas Schwab <schwab@redhat.com>   2011-11-28 13:38:19 +0100
committer  Andreas Schwab <schwab@redhat.com>   2011-11-30 11:03:19 +0100
commit     c5a0802a682dba23f92d47f0f99775aebfbe2539
tree       f662acd317fdc5195592161d27ed1fd1ad74d0bc
parent     9d65ea3a9b83ac3961229ba296a7caf90abce68d
Handle EAGAIN from FUTEX_WAIT_REQUEUE_PI
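
The kernel's FUTEX_WAIT_REQUEUE_PI op compares the condvar futex word against
the expected value passed in and fails with EAGAIN if the word has already
changed, so pthread_cond_wait has to refresh its expected value and reissue
the syscall.  A minimal C sketch of that retry, assuming a private condvar
(the helper names are illustrative and the condvar's internal lock is elided;
the actual fix below is in i386/x86_64 assembly):

	#include <errno.h>
	#include <linux/futex.h>
	#include <sys/syscall.h>
	#include <unistd.h>

	/* Wait on *cond_futex, expecting it to still contain EXPECTED, and
	   let the kernel requeue us onto the PI futex at *mutex_futex.  */
	static long
	cond_futex_wait_pi (unsigned int *cond_futex, unsigned int expected,
			    unsigned int *mutex_futex)
	{
	  return syscall (SYS_futex, cond_futex,
			  FUTEX_WAIT_REQUEUE_PI | FUTEX_PRIVATE_FLAG,
			  expected, NULL, mutex_futex, 0);
	}

	static void
	wait_with_retry (unsigned int *cond_futex, unsigned int *mutex_futex)
	{
	  unsigned int expected = *cond_futex;

	  while (cond_futex_wait_pi (cond_futex, expected, mutex_futex) != 0
		 && errno == EAGAIN)
	    /* The futex word changed before the kernel could block us.
	       Bump it to get a fresh expected value and try again (the
	       assembly below does this while holding the condvar's
	       internal lock).  */
	    expected = __atomic_add_fetch (cond_futex, 1, __ATOMIC_RELAXED);
	}

In the patch itself the EAGAIN path (labels 91/92/93) re-acquires cond_lock,
increments cond_futex to obtain the new expected value, drops the lock,
reloads the remaining SYS_futex arguments and jumps back to label 90 to
repeat the syscall.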
-rw-r--r--  ChangeLog                                                   |  6
-rw-r--r--  nptl/sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S | 80
-rw-r--r--  nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S    | 76
3 files changed, 160 insertions, 2 deletions
diff --git a/ChangeLog b/ChangeLog
index d9866def31..47446886f3 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,9 @@
+2011-11-28  Andreas Schwab  <schwab@redhat.com>
+
+	* sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S: Handle
+	EAGAIN from FUTEX_WAIT_REQUEUE_PI.
+	* sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S: Likewise.
+
 2011-11-17  Ulrich Drepper  <drepper@gmail.com>
 
 	* Makefile.in: Remove CVSOPT handling.
diff --git a/nptl/sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S b/nptl/sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S
index 53970d755f..54590b7b8b 100644
--- a/nptl/sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S
+++ b/nptl/sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S
@@ -134,6 +134,7 @@ __pthread_cond_wait:
 	cmpl	$PI_BIT, %eax
 	jne	18f
 
+90:
 	movl	$(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %ecx
 	movl	%ebp, %edx
 	xorl	%esi, %esi
@@ -147,6 +148,9 @@ __pthread_cond_wait:
 	sete	16(%esp)
 	je	19f
 
+	cmpl	$-EAGAIN, %eax
+	je	91f
+
 	/* Normal and PI futexes dont mix. Use normal futex functions only
 	   if the kernel does not support the PI futex functions.  */
 	cmpl	$-ENOSYS, %eax
@@ -391,6 +395,78 @@ __pthread_cond_wait:
 #endif
 	call	__lll_unlock_wake
 	jmp	11b
+
+91:
+.LcleanupSTART2:
+	/* FUTEX_WAIT_REQUEUE_PI returned EAGAIN.  We need to
+	   call it again.  */
+
+	/* Get internal lock.  */
+	movl	$1, %edx
+	xorl	%eax, %eax
+	LOCK
+#if cond_lock == 0
+	cmpxchgl %edx, (%ebx)
+#else
+	cmpxchgl %edx, cond_lock(%ebx)
+#endif
+	jz	92f
+
+#if cond_lock == 0
+	movl	%ebx, %edx
+#else
+	leal	cond_lock(%ebx), %edx
+#endif
+#if (LLL_SHARED-LLL_PRIVATE) > 255
+	xorl	%ecx, %ecx
+#endif
+	cmpl	$-1, dep_mutex(%ebx)
+	setne	%cl
+	subl	$1, %ecx
+	andl	$(LLL_SHARED-LLL_PRIVATE), %ecx
+#if LLL_PRIVATE != 0
+	addl	$LLL_PRIVATE, %ecx
+#endif
+	call	__lll_lock_wait
+
+92:
+	/* Increment the cond_futex value again, so it can be used as a new
+	   expected value. */
+	addl	$1, cond_futex(%ebx)
+	movl	cond_futex(%ebx), %ebp
+
+	/* Unlock.  */
+	LOCK
+#if cond_lock == 0
+	subl	$1, (%ebx)
+#else
+	subl	$1, cond_lock(%ebx)
+#endif
+	je	93f
+#if cond_lock == 0
+	movl	%ebx, %eax
+#else
+	leal	cond_lock(%ebx), %eax
+#endif
+#if (LLL_SHARED-LLL_PRIVATE) > 255
+	xorl	%ecx, %ecx
+#endif
+	cmpl	$-1, dep_mutex(%ebx)
+	setne	%cl
+	subl	$1, %ecx
+	andl	$(LLL_SHARED-LLL_PRIVATE), %ecx
+#if LLL_PRIVATE != 0
+	addl	$LLL_PRIVATE, %ecx
+#endif
+	call	__lll_unlock_wake
+
+93:
+	/* Set the rest of SYS_futex args for FUTEX_WAIT_REQUEUE_PI. */
+	xorl	%ecx, %ecx
+	movl	dep_mutex(%ebx), %edi
+	jmp	90b
+.LcleanupEND2:
+
 	.size	__pthread_cond_wait, .-__pthread_cond_wait
 versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait,
 		  GLIBC_2_3_2)
@@ -563,6 +639,10 @@ __condvar_w_cleanup:
 	.long	.LcleanupEND-.Lsub_cond_futex
 	.long	__condvar_w_cleanup-.LSTARTCODE
 	.uleb128  0
+	.long	.LcleanupSTART2-.LSTARTCODE
+	.long	.LcleanupEND2-.LcleanupSTART2
+	.long	__condvar_w_cleanup-.LSTARTCODE
+	.uleb128  0
 	.long	.LcallUR-.LSTARTCODE
 	.long	.LENDCODE-.LcallUR
 	.long	0
diff --git a/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S b/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
index 7535baa786..d837d158a4 100644
--- a/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
+++ b/nptl/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S
@@ -23,6 +23,7 @@
 #include <lowlevelcond.h>
 #include <tcb-offsets.h>
 #include <pthread-pi-defines.h>
+#include <pthread-errnos.h>
 
 #include <kernel-features.h>
 
@@ -133,11 +134,14 @@ __pthread_cond_wait:
 	cmpl	$PI_BIT, %eax
 	jne	61f
 
+90:
 	movl	$(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %esi
 	movl	$SYS_futex, %eax
 	syscall
 
 	movl	$1, %r8d
+	cmpq	$-EAGAIN, %rax
+	je	91f
 #ifdef __ASSUME_REQUEUE_PI
 	jmp	62f
 #else
@@ -324,6 +328,70 @@ __pthread_cond_wait:
 
 13:	movq	%r10, %rax
 	jmp	14b
+
+91:
+.LcleanupSTART2:
+	/* FUTEX_WAIT_REQUEUE_PI returned EAGAIN.  We need to
+	   call it again.  */
+	movq	8(%rsp), %rdi
+
+	/* Get internal lock.  */
+	movl	$1, %esi
+	xorl	%eax, %eax
+	LOCK
+#if cond_lock == 0
+	cmpxchgl %esi, (%rdi)
+#else
+	cmpxchgl %esi, cond_lock(%rdi)
+#endif
+	jz	92f
+
+#if cond_lock != 0
+	addq	$cond_lock, %rdi
+#endif
+	cmpq	$-1, dep_mutex-cond_lock(%rdi)
+	movl	$LLL_PRIVATE, %eax
+	movl	$LLL_SHARED, %esi
+	cmovne	%eax, %esi
+	callq	__lll_lock_wait
+#if cond_lock != 0
+	subq	$cond_lock, %rdi
+#endif
+92:
+	/* Increment the cond_futex value again, so it can be used as a new
+	   expected value. */
+	incl	cond_futex(%rdi)
+	movl	cond_futex(%rdi), %edx
+
+	/* Release internal lock.  */
+	LOCK
+#if cond_lock == 0
+	decl	(%rdi)
+#else
+	decl	cond_lock(%rdi)
+#endif
+	jz	93f
+
+#if cond_lock != 0
+	addq	$cond_lock, %rdi
+#endif
+	cmpq	$-1, dep_mutex-cond_lock(%rdi)
+	movl	$LLL_PRIVATE, %eax
+	movl	$LLL_SHARED, %esi
+	cmovne	%eax, %esi
+	/* The call preserves %rdx.  */
+	callq	__lll_unlock_wake
+#if cond_lock != 0
+	subq	$cond_lock, %rdi
+#endif
+93:
+	/* Set the rest of SYS_futex args for FUTEX_WAIT_REQUEUE_PI. */
+	xorq	%r10, %r10
+	movq	dep_mutex(%rdi), %r8
+	leaq	cond_futex(%rdi), %rdi
+	jmp	90b
+.LcleanupEND2:
+
 	.size	__pthread_cond_wait, .-__pthread_cond_wait
 versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait,
 		  GLIBC_2_3_2)
@@ -476,11 +544,15 @@ __condvar_cleanup1:
 	.uleb128 .LcleanupSTART-.LSTARTCODE
 	.uleb128 .LcleanupEND-.LcleanupSTART
 	.uleb128 __condvar_cleanup1-.LSTARTCODE
-	.uleb128  0
+	.uleb128 0
+	.uleb128 .LcleanupSTART2-.LSTARTCODE
+	.uleb128 .LcleanupEND2-.LcleanupSTART2
+	.uleb128 __condvar_cleanup1-.LSTARTCODE
+	.uleb128 0
 	.uleb128 .LcallUR-.LSTARTCODE
 	.uleb128 .LENDCODE-.LcallUR
 	.uleb128 0
-	.uleb128  0
+	.uleb128 0
 .Lcstend:
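
The new exception-table entries cover the retry path (.LcleanupSTART2 ..
.LcleanupEND2) so that a cancellation arriving while the thread re-issues
FUTEX_WAIT_REQUEUE_PI still runs the existing condvar cleanup handler
(__condvar_w_cleanup on i386, __condvar_cleanup1 on x86_64).  A rough
C-level analogue of that guarantee, purely illustrative and not how the
assembly is actually structured:

	#include <pthread.h>

	/* Hypothetical stand-in for the condvar cleanup handler: release
	   the condvar's internal state if the thread is cancelled.  */
	static void
	cleanup_handler (void *cond)
	{
	  (void) cond;
	  /* ... unlock the internal lock, adjust waiter counts ... */
	}

	static void
	wait_region (pthread_cond_t *cond)
	{
	  pthread_cleanup_push (cleanup_handler, cond);
	  /* ... the EAGAIN retry loop sketched above runs here; a
	     cancellation anywhere inside it still invokes
	     cleanup_handler ... */
	  pthread_cleanup_pop (0);
	}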