author     H.J. Lu <hjl.tools@gmail.com>    2015-08-13 03:39:22 -0700
committer  H.J. Lu <hjl.tools@gmail.com>    2015-08-13 03:41:37 -0700
commit     1aee37a22e3977de7a89e734e0a1e112f52045f2 (patch)
tree       593060eff335a7a592234285b4c89ba21bce7706 /sysdeps/i386/i686/multiarch/memchr.S
parent     0b5395f052ee09cd7e3d219af4e805c38058afb5 (diff)
Update i686 multiarch functions for <cpu-features.h>
This patch updates the i686 multiarch functions to use the newly defined
HAS_CPU_FEATURE, HAS_ARCH_FEATURE, LOAD_GOT_AND_RTLD_GLOBAL_RO and
LOAD_FUNC_GOT_EAX macros from <cpu-features.h>.

	* sysdeps/i386/i686/fpu/multiarch/e_expf.c: Replace HAS_XXX with
	HAS_CPU_FEATURE/HAS_ARCH_FEATURE (XXX).
	* sysdeps/i386/i686/fpu/multiarch/s_cosf.c: Likewise.
	* sysdeps/i386/i686/fpu/multiarch/s_sincosf.c: Likewise.
	* sysdeps/i386/i686/fpu/multiarch/s_sinf.c: Likewise.
	* sysdeps/i386/i686/multiarch/ifunc-impl-list.c: Likewise.
	* sysdeps/i386/i686/multiarch/s_fma.c: Likewise.
	* sysdeps/i386/i686/multiarch/s_fmaf.c: Likewise.
	* sysdeps/i386/i686/multiarch/bcopy.S: Remove __init_cpu_features
	call.  Merge SHARED and !SHARED.  Add LOAD_GOT_AND_RTLD_GLOBAL_RO.
	Use LOAD_FUNC_GOT_EAX to load function address.  Replace HAS_XXX
	with HAS_CPU_FEATURE/HAS_ARCH_FEATURE (XXX).
	* sysdeps/i386/i686/multiarch/bzero.S: Likewise.
	* sysdeps/i386/i686/multiarch/memchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/memcmp.S: Likewise.
	* sysdeps/i386/i686/multiarch/memcpy.S: Likewise.
	* sysdeps/i386/i686/multiarch/memcpy_chk.S: Likewise.
	* sysdeps/i386/i686/multiarch/memmove.S: Likewise.
	* sysdeps/i386/i686/multiarch/memmove_chk.S: Likewise.
	* sysdeps/i386/i686/multiarch/mempcpy.S: Likewise.
	* sysdeps/i386/i686/multiarch/mempcpy_chk.S: Likewise.
	* sysdeps/i386/i686/multiarch/memrchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/memset.S: Likewise.
	* sysdeps/i386/i686/multiarch/memset_chk.S: Likewise.
	* sysdeps/i386/i686/multiarch/rawmemchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/strcasecmp.S: Likewise.
	* sysdeps/i386/i686/multiarch/strcat.S: Likewise.
	* sysdeps/i386/i686/multiarch/strchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/strcmp.S: Likewise.
	* sysdeps/i386/i686/multiarch/strcpy.S: Likewise.
	* sysdeps/i386/i686/multiarch/strcspn.S: Likewise.
	* sysdeps/i386/i686/multiarch/strlen.S: Likewise.
	* sysdeps/i386/i686/multiarch/strncase.S: Likewise.
	* sysdeps/i386/i686/multiarch/strnlen.S: Likewise.
	* sysdeps/i386/i686/multiarch/strrchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/strspn.S: Likewise.
	* sysdeps/i386/i686/multiarch/wcschr.S: Likewise.
	* sysdeps/i386/i686/multiarch/wcscmp.S: Likewise.
	* sysdeps/i386/i686/multiarch/wcscpy.S: Likewise.
	* sysdeps/i386/i686/multiarch/wcslen.S: Likewise.
	* sysdeps/i386/i686/multiarch/wcsrchr.S: Likewise.
	* sysdeps/i386/i686/multiarch/wmemcmp.S: Likewise.
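Background, for readers unfamiliar with the mechanism: the assembly selectors touched by this patch are GNU indirect functions (IFUNCs), i.e. resolver routines the dynamic linker runs once, at relocation time, to pick an implementation based on CPU features. The C sketch below is an illustrative analogue of that pattern, not glibc code; it uses GCC's ifunc attribute and __builtin_cpu_supports as stand-ins for glibc's private cpu_features data and the HAS_CPU_FEATURE macro, and the names my_memchr, my_memchr_resolver and the variant bodies are hypothetical.

    #include <stddef.h>

    typedef void *(*memchr_fn) (const void *, int, size_t);

    static void *memchr_generic (const void *s, int c, size_t n);
    static void *memchr_sse2 (const void *s, int c, size_t n);

    /* Resolver: the dynamic linker calls this once and binds my_memchr
       to whatever function pointer it returns.  */
    static memchr_fn
    my_memchr_resolver (void)
    {
      __builtin_cpu_init ();              /* feature bits may not be set yet */
      if (__builtin_cpu_supports ("sse2"))
        return memchr_sse2;
      return memchr_generic;
    }

    /* The exported symbol: an indirect function bound via the resolver.  */
    void *my_memchr (const void *s, int c, size_t n)
      __attribute__ ((ifunc ("my_memchr_resolver")));

    static void *
    memchr_generic (const void *s, int c, size_t n)
    {
      const unsigned char *p = s;
      while (n--)
        if (*p++ == (unsigned char) c)
          return (void *) (p - 1);
      return NULL;
    }

    static void *
    memchr_sse2 (const void *s, int c, size_t n)
    {
      /* Stand-in body; a real variant would use SSE2 intrinsics or assembly.  */
      return memchr_generic (s, c, n);
    }

glibc's own i686 selectors do the equivalent in assembly, reading its internal cpu_features block through HAS_CPU_FEATURE/HAS_ARCH_FEATURE, which is exactly what this patch switches them to.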
Diffstat (limited to 'sysdeps/i386/i686/multiarch/memchr.S')
-rw-r--r--   sysdeps/i386/i686/multiarch/memchr.S   36
1 file changed, 6 insertions(+), 30 deletions(-)
diff --git a/sysdeps/i386/i686/multiarch/memchr.S b/sysdeps/i386/i686/multiarch/memchr.S
index 02994d055e..65e6b96f3b 100644
--- a/sysdeps/i386/i686/multiarch/memchr.S
+++ b/sysdeps/i386/i686/multiarch/memchr.S
@@ -22,46 +22,22 @@
 #include <init-arch.h>
 
 #if IS_IN (libc)
-# define CFI_POP(REG) \
-	cfi_adjust_cfa_offset (-4); \
-	cfi_restore (REG)
-
-# define CFI_PUSH(REG) \
-	cfi_adjust_cfa_offset (4); \
-	cfi_rel_offset (REG, 0)
-
 	.text
 ENTRY(__memchr)
 	.type	__memchr, @gnu_indirect_function
-	pushl	%ebx
-	CFI_PUSH (%ebx)
-	LOAD_PIC_REG(bx)
-	cmpl	$0, KIND_OFFSET+__cpu_features@GOTOFF(%ebx)
-	jne	1f
-	call	__init_cpu_features
-
-1:	testl	$bit_SSE2, CPUID_OFFSET+index_SSE2+__cpu_features@GOTOFF(%ebx)
+	LOAD_GOT_AND_RTLD_GLOBAL_RO
+	HAS_CPU_FEATURE (SSE2)
 	jz	2f
-	testl	$bit_Slow_BSF, FEATURE_OFFSET+index_Slow_BSF+__cpu_features@GOTOFF(%ebx)
+	HAS_ARCH_FEATURE (Slow_BSF)
 	jz	3f
-	leal	__memchr_sse2@GOTOFF(%ebx), %eax
-	popl	%ebx
-	CFI_POP	(%ebx)
+	LOAD_FUNC_GOT_EAX ( __memchr_sse2)
 	ret
-	CFI_PUSH (%ebx)
-
-2:	leal	__memchr_ia32@GOTOFF(%ebx), %eax
-	popl	%ebx
-	CFI_POP	(%ebx)
+2:	LOAD_FUNC_GOT_EAX (__memchr_ia32)
 	ret
-	CFI_PUSH (%ebx)
-
-3:	leal	__memchr_sse2_bsf@GOTOFF(%ebx), %eax
-	popl	%ebx
-	CFI_POP	(%ebx)
+3:	LOAD_FUNC_GOT_EAX (__memchr_sse2_bsf)
 	ret
 END(__memchr)
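To make the selection logic in the patched resolver above easier to follow, here is a hedged C rendering of its control flow. has_cpu_feature and has_arch_feature are hypothetical helpers standing in for the HAS_CPU_FEATURE and HAS_ARCH_FEATURE macros; the variant names mirror the assembly, and this is a sketch of the branch structure, not glibc source.

    #include <stddef.h>

    typedef void *(*memchr_fn) (const void *, int, size_t);

    /* Hypothetical stand-ins for glibc's HAS_CPU_FEATURE / HAS_ARCH_FEATURE,
       assumed to return nonzero when the feature bit is set.  */
    extern int has_cpu_feature (const char *name);
    extern int has_arch_feature (const char *name);

    /* The three implementation variants named in the diff.  */
    extern void *__memchr_ia32 (const void *, int, size_t);
    extern void *__memchr_sse2 (const void *, int, size_t);
    extern void *__memchr_sse2_bsf (const void *, int, size_t);

    static memchr_fn
    select_memchr (void)
    {
      if (!has_cpu_feature ("SSE2"))
        return __memchr_ia32;        /* "jz 2f": no SSE2 at all */
      if (!has_arch_feature ("Slow_BSF"))
        return __memchr_sse2_bsf;    /* "jz 3f": BSF is fast on this CPU */
      return __memchr_sse2;          /* SSE2 present, but BSF is slow */
    }

The ordering matters: the BSF-based variant is preferred whenever SSE2 is available and the Slow_BSF flag is not set, while the plain SSE2 variant covers CPUs where BSF is known to be slow.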