author     Andreas Jaeger <aj@suse.de>    2002-11-12 21:43:26 +0000
committer  Andreas Jaeger <aj@suse.de>    2002-11-12 21:43:26 +0000
commit     6e14793db059304e39ec825c2f8adf2f6a140b22 (patch)
tree       0c313a03b4b0e346dfb881b75787de679f9152ec /sysdeps
parent     44df0cea622127fe41496b9f7ff808501683e856 (diff)
Update.
	* sysdeps/x86_64/strchr.S: Don't use one register for two
	purposes; this fixes a bug noticed by test-strchr.c.
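The failure can only show up when strchr is handed an unaligned string: the alignment prologue counted down in %ecx while each loaded byte also landed in %cl, the low byte of the same register, so the byte counter was clobbered. Exactly how the clobbered counter misbehaves depends on the byte values scanned, so a test that sweeps alignment offsets and needle characters, in the spirit of test-strchr.c, is the natural way to catch it. The sketch below illustrates that kind of check; every name, buffer size, and offset in it is illustrative and not taken from the actual glibc test.

/* Illustrative sketch only; not the actual test-strchr.c.  It calls
   strchr at every alignment offset 0..7, which is the condition needed
   to run the prologue loop that the patch fixes, and compares the
   result against a trivial reference implementation.  */
#include <assert.h>
#include <stdio.h>
#include <string.h>

/* Byte-by-byte reference to compare against.  */
static char *
ref_strchr (const char *s, int c)
{
  for (;; ++s)
    {
      if (*s == (char) c)
        return (char *) s;
      if (*s == '\0')
        return NULL;
    }
}

int
main (void)
{
  /* Over-aligned backing store; the string is placed at offsets 0..7 so
     the assembly prologue runs between 0 and 7 times.  */
  static char buf[64] __attribute__ ((aligned (16)));

  for (int align = 0; align < 8; ++align)
    {
      char *s = buf + align;
      strcpy (s, "abcdefg");
      for (int c = 'a'; c <= 'h'; ++c)
        assert (strchr (s, c) == ref_strchr (s, c));
      assert (strchr (s, '\0') == ref_strchr (s, '\0'));
    }

  puts ("ok");
  return 0;
}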
Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/x86_64/strchr.S  |  12
1 file changed, 6 insertions, 6 deletions
diff --git a/sysdeps/x86_64/strchr.S b/sysdeps/x86_64/strchr.S
index 391f575aa5..f862cb2739 100644
--- a/sysdeps/x86_64/strchr.S
+++ b/sysdeps/x86_64/strchr.S
@@ -36,14 +36,14 @@ ENTRY (BP_SYM (strchr))
 	      8-byte alignment guarantees that we never access illegal
 	      memory if this would not also be done by the trivial
 	      implementation (this is because all processor inherent
-	      boundaries are multiples of 8.  */
+	      boundaries are multiples of 8).  */
 
-	movq	%rdi, %rcx
-	andl	$7, %ecx	/* Mask alignment bits  */
+	movq	%rdi, %rdx
+	andl	$7, %edx	/* Mask alignment bits  */
 	movq	%rdi, %rax	/* duplicate destination.  */
 	jz	1f		/* aligned => start loop */
-	neg	%ecx
-	addl	$8, %ecx	/* Align to 8 bytes.  */
+	neg	%edx
+	addl	$8, %edx	/* Align to 8 bytes.  */
 
 	/* Search the first bytes directly.  */
 0:	movb	(%rax), %cl	/* load byte  */
@@ -52,7 +52,7 @@ ENTRY (BP_SYM (strchr))
 	testb	%cl,%cl		/* is byte NUL? */
 	je	7f		/* yes => return NULL */
 	incq	%rax		/* increment pointer */
-	decl	%ecx
+	decl	%edx
 	jnz	0b
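For readers not fluent in x86-64 assembly, the prologue this hunk touches can be described in C. The sketch below is illustrative only (the names and structure are not glibc's); it shows the byte count that the andl $7 / jz / neg / addl $8 sequence computes, and why that count and the byte just loaded must live in separate objects, which is what moving the counter from %ecx to %edx enforces.

/* Sketch of the prologue the patch corrects; not the glibc code.  */
#include <stdint.h>
#include <stdio.h>

static char *
strchr_sketch (const char *s, int c_in)
{
  unsigned char c = (unsigned char) c_in;

  /* Bytes left before the next 8-byte boundary: the combined effect of
     the andl $7 / jz / neg / addl $8 sequence in strchr.S.  */
  unsigned int pad = (unsigned int) ((-(uintptr_t) s) & 7);

  /* Search the first bytes directly.  In the assembly each byte is
     loaded into %cl; before the patch the counter also lived in %ecx,
     so the load clobbered it.  Keeping `b' and `pad' distinct here
     mirrors keeping them in separate registers there.  */
  for (; pad != 0; --pad, ++s)
    {
      unsigned char b = (unsigned char) *s;
      if (b == c)
        return (char *) s;
      if (b == '\0')
        return NULL;
    }

  /* From here the real routine scans aligned 8-byte words (the code at
     label 1: in strchr.S); a plain byte loop stands in for it.  */
  for (;; ++s)
    {
      if ((unsigned char) *s == c)
        return (char *) s;
      if (*s == '\0')
        return NULL;
    }
}

int
main (void)
{
  /* 8-byte-aligned storage, then start one byte in, so the prologue
     loop is guaranteed to run.  */
  static const char msg[16] __attribute__ ((aligned (8))) = "hello, world";
  printf ("%s\n", strchr_sketch (msg + 1, 'w'));
  return 0;
}

The word-at-a-time part after label 1: is unchanged by this patch, which is why the sketch does not attempt to reproduce it.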