author    | Jakub Jelinek <jakub@redhat.com>   | 2010-12-09 10:38:18 -0500
committer | Ulrich Drepper <drepper@gmail.com> | 2010-12-09 10:38:18 -0500
commit    | 42acbb92c861e97a6e1293ea853db88342a1bf53 (patch)
tree      | 88e096f0d2144fd9f2e9d6ba8cc093bcd56084ec /sysdeps/i386/i686
parent    | a5b913e299027a799ae5435d66e3f20e95859654 (diff)
Fix -D_FORTIFY_SOURCE memmove and bcopy
Diffstat (limited to 'sysdeps/i386/i686')
-rw-r--r-- | sysdeps/i386/i686/multiarch/strcmp.S | 55
1 file changed, 25 insertions, 30 deletions
```diff
diff --git a/sysdeps/i386/i686/multiarch/strcmp.S b/sysdeps/i386/i686/multiarch/strcmp.S
index 7136d47e85..8724594881 100644
--- a/sysdeps/i386/i686/multiarch/strcmp.S
+++ b/sysdeps/i386/i686/multiarch/strcmp.S
@@ -40,37 +40,32 @@
    need strncmp before the initialization happened.  */
 #if (defined SHARED || !defined USE_AS_STRNCMP) && !defined NOT_IN_libc
 # ifdef SHARED
-	.section .gnu.linkonce.t.__i686.get_pc_thunk.bx,"ax",@progbits
-	.globl	__i686.get_pc_thunk.bx
-	.hidden	__i686.get_pc_thunk.bx
-	.p2align 4
-	.type	__i686.get_pc_thunk.bx,@function
-__i686.get_pc_thunk.bx:
-	movl	(%esp), %ebx
+	.section .gnu.linkonce.t.__i686.get_pc_thunk.dx,"ax",@progbits
+	.globl	__i686.get_pc_thunk.dx
+	.hidden	__i686.get_pc_thunk.dx
+	.p2align 2
+	.type	__i686.get_pc_thunk.dx,@function
+__i686.get_pc_thunk.dx:
+	movl	(%esp), %edx
 	ret
+	.size	__i686.get_pc_thunk.dx, .-__i686.get_pc_thunk.dx

 	.text
 ENTRY(STRCMP)
 	.type	STRCMP, @gnu_indirect_function
-	pushl	%ebx
-	cfi_adjust_cfa_offset (4)
-	cfi_rel_offset (ebx, 0)
-	call	__i686.get_pc_thunk.bx
-	addl	$_GLOBAL_OFFSET_TABLE_, %ebx
-	cmpl	$0, KIND_OFFSET+__cpu_features@GOTOFF(%ebx)
+	call	__i686.get_pc_thunk.dx
+	addl	$_GLOBAL_OFFSET_TABLE_, %edx
+	cmpl	$0, KIND_OFFSET+__cpu_features@GOTOFF(%edx)
 	jne	1f
 	call	__init_cpu_features
-1:	leal	__STRCMP_IA32@GOTOFF(%ebx), %eax
-	testl	$bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features@GOTOFF(%ebx)
-	jz	2f
-	leal	__STRCMP_SSSE3@GOTOFF(%ebx), %eax
-	testl	$bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features@GOTOFF(%ebx)
-	jz	2f
-	leal	__STRCMP_SSE4_2@GOTOFF(%ebx), %eax
-2:	popl	%ebx
-	cfi_adjust_cfa_offset (-4)
-	cfi_restore (ebx)
-	ret
+1:	leal	__STRCMP_SSE4_2@GOTOFF(%edx), %eax
+	testl	$bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features@GOTOFF(%edx)
+	jnz	2f
+	leal	__STRCMP_SSSE3@GOTOFF(%edx), %eax
+	testl	$bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features@GOTOFF(%edx)
+	jnz	2f
+	leal	__STRCMP_IA32@GOTOFF(%edx), %ecx
+2:	ret
 END(STRCMP)
 # else
 	.text
@@ -79,13 +74,13 @@ ENTRY(STRCMP)
 	cmpl	$0, KIND_OFFSET+__cpu_features
 	jne	1f
 	call	__init_cpu_features
-1:	leal	__STRCMP_IA32, %eax
-	testl	$bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features
-	jz	2f
-	leal	__STRCMP_SSSE3, %eax
+1:	leal	__STRCMP_SSE4_2, %eax
 	testl	$bit_SSE4_2, CPUID_OFFSET+index_SSE4_2+__cpu_features
-	jz	2f
-	leal	__STRCMP_SSE4_2, %eax
+	jnz	2f
+	leal	__STRCMP_SSSE3, %eax
+	testl	$bit_SSSE3, CPUID_OFFSET+index_SSSE3+__cpu_features
+	jnz	2f
+	leal	__STRCMP_IA32, %eax
 2:	ret
 END(STRCMP)
 # endif
```
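For readers less familiar with GNU indirect functions, the assembly above is an IFUNC resolver: it runs once at symbol-resolution time and returns the address of the strcmp variant that matches the CPU, now testing the strongest feature (SSE4.2) first and falling back to SSSE3 and then the plain IA32 version. The sketch below is a C-level analogue of that dispatch pattern, not glibc's actual code; the `my_strcmp_*` placeholder variants and the generic loop are hypothetical stand-ins for the real `__strcmp_ia32` / `__strcmp_ssse3` / `__strcmp_sse4_2` entry points, and it assumes GCC on an ELF target with GNU binutils.

```c
/* Minimal sketch of the IFUNC dispatch pattern used by the assembly above.
   The variant functions are trivial placeholders, not glibc internals.  */

static int
generic_strcmp (const char *a, const char *b)
{
  while (*a != '\0' && *a == *b)
    a++, b++;
  return (unsigned char) *a - (unsigned char) *b;
}

/* Placeholder "optimized" variants; each just wraps the generic loop.  */
static int my_strcmp_ia32 (const char *a, const char *b)   { return generic_strcmp (a, b); }
static int my_strcmp_ssse3 (const char *a, const char *b)  { return generic_strcmp (a, b); }
static int my_strcmp_sse4_2 (const char *a, const char *b) { return generic_strcmp (a, b); }

/* The resolver runs once, at symbol-resolution time, and returns the address
   the dynamic linker binds my_strcmp to.  Like the patched assembly, it tests
   the strongest feature first and falls through to weaker ones.  */
static int (*resolve_my_strcmp (void)) (const char *, const char *)
{
  __builtin_cpu_init ();                    /* required before cpu_supports in a resolver */
  if (__builtin_cpu_supports ("sse4.2"))
    return my_strcmp_sse4_2;
  if (__builtin_cpu_supports ("ssse3"))
    return my_strcmp_ssse3;
  return my_strcmp_ia32;
}

/* GNU indirect function: callers use my_strcmp like a normal function,
   but it is bound to whichever variant the resolver selected.  */
int my_strcmp (const char *, const char *)
     __attribute__ ((ifunc ("resolve_my_strcmp")));
```

Checking the most capable implementation first keeps the common path to a single feature test on current hardware, which is exactly the reordering this patch applies in the assembly (SSE4.2, then SSSE3, then the IA32 fallback).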