-rw-r--r--	ChangeLog	 3
-rw-r--r--	sysdeps/x86_64/strchr.S	12
2 files changed, 9 insertions, 6 deletions
diff --git a/ChangeLog b/ChangeLog
index afd1e539c3..e7a526c491 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,8 @@
 2002-11-12  Andreas Jaeger  <aj@suse.de>
 
+	* sysdeps/x86_64/strchr.S: Don't use one register for two
+	purposes, this fixes a bug noticed by test-strchr.c.
+
 	* sysdeps/x86_64/strcat.S: Fix algorithm to align source pointer
 	correctly.
 
diff --git a/sysdeps/x86_64/strchr.S b/sysdeps/x86_64/strchr.S
index 391f575aa5..f862cb2739 100644
--- a/sysdeps/x86_64/strchr.S
+++ b/sysdeps/x86_64/strchr.S
@@ -36,14 +36,14 @@ ENTRY (BP_SYM (strchr))
 	   8-byte alignment guarantees that we never access illegal
 	   memory if this would not also be done by the trivial
 	   implementation (this is because all processor inherent
-	   boundaries are multiples of 8.  */
+	   boundaries are multiples of 8).  */
 
-	movq %rdi, %rcx
-	andl $7, %ecx		/* Mask alignment bits  */
+	movq %rdi, %rdx
+	andl $7, %edx		/* Mask alignment bits  */
 	movq %rdi, %rax		/* duplicate destination.  */
 	jz 1f			/* aligned => start loop */
-	neg %ecx
-	addl $8, %ecx		/* Align to 8 bytes.  */
+	neg %edx
+	addl $8, %edx		/* Align to 8 bytes.  */
 
 	/* Search the first bytes directly.  */
 0:	movb (%rax), %cl	/* load byte  */
@@ -52,7 +52,7 @@ ENTRY (BP_SYM (strchr))
 	testb %cl,%cl		/* is byte NUL? */
 	je 7f			/* yes => return NULL */
 	incq %rax		/* increment pointer */
-	decl %ecx
+	decl %edx
 	jnz 0b
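
Background note (not part of the commit): the old code kept the count of unaligned head bytes in %ecx, but the byte-search loop also loads each byte into %cl, so "movb (%rax), %cl" clobbered the counter before "decl %ecx" ran. The patch moves the counter to %edx, presumably because it is call-clobbered and otherwise unused on this path, so no save/restore is needed. The following is a minimal sketch of the kind of case test-strchr.c would expose, assuming only standard strchr semantics; the buffer layout and offsets are illustrative, not taken from the actual test:

#include <assert.h>
#include <string.h>

int
main (void)
{
  /* Force a known alignment, then start the string one byte past it so
     strchr has to walk the unaligned head before the 8-byte loop.  */
  char buf[32] __attribute__ ((aligned (16)));
  char *s = buf + 1;

  strcpy (s, "abcdefg");

  assert (strchr (s, 'c') == s + 2);  /* match inside the unaligned head */
  assert (strchr (s, 'z') == NULL);   /* no match: must stop at the NUL */
  return 0;
}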