diff options
author | H.J. Lu <hjl.tools@gmail.com> | 2016-06-08 13:55:45 -0700 |
---|---|---|
committer | H.J. Lu <hjl.tools@gmail.com> | 2016-06-08 13:56:14 -0700 |
commit | 5e8c5bb1ac83aa2577d64d82467a653fa413f7ce (patch) | |
tree | c713874b92fbc3f6858e48d71a883a990665a2f3 /sysdeps/x86_64/multiarch/memset_chk.S | |
parent | 5188b973250523d3e9c80ea3ab4001f696e6fa1a (diff) | |
download | glibc-5e8c5bb1ac83aa2577d64d82467a653fa413f7ce.tar.gz glibc-5e8c5bb1ac83aa2577d64d82467a653fa413f7ce.tar.xz glibc-5e8c5bb1ac83aa2577d64d82467a653fa413f7ce.zip |
X86-64: Remove the previous SSE2/AVX2 memsets
Since the new SSE2/AVX2 memsets are faster than the previous ones, we can remove the previous SSE2/AVX2 memsets and replace them with the new ones. This reduces the size of libc.so by about 900 bytes. No change in IFUNC selection if SSE2 and AVX2 memsets weren't used before. If SSE2 or AVX2 memset was used, the new SSE2 or AVX2 memset optimized with Enhanced REP STOSB will be used for processors with ERMS. The new AVX512 memset will be used for processors with AVX512 which prefer vzeroupper.

	[BZ #19881]
	* sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S: Folded
	into ...
	* sysdeps/x86_64/memset.S: This.
	(__bzero): Removed.
	(__memset_tail): Likewise.
	(__memset_chk): Likewise.
	(memset): Likewise.
	(MEMSET_CHK_SYMBOL): New.  Define only if MEMSET_SYMBOL isn't
	defined.
	(MEMSET_SYMBOL): Define only if MEMSET_SYMBOL isn't defined.
	* sysdeps/x86_64/multiarch/memset-avx2.S: Removed.
	(__memset_zero_constant_len_parameter): Check SHARED instead of
	PIC.
	* sysdeps/x86_64/multiarch/Makefile (sysdep_routines): Remove
	memset-avx2 and memset-sse2-unaligned-erms.
	* sysdeps/x86_64/multiarch/ifunc-impl-list.c
	(__libc_ifunc_impl_list): Remove __memset_chk_sse2,
	__memset_chk_avx2, __memset_sse2 and __memset_avx2_unaligned.
	* sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
	(__bzero): Enabled.
	* sysdeps/x86_64/multiarch/memset.S (memset): Replace
	__memset_sse2 and __memset_avx2 with __memset_sse2_unaligned
	and __memset_avx2_unaligned.  Use __memset_sse2_unaligned_erms
	or __memset_avx2_unaligned_erms if processor has ERMS.  Support
	__memset_avx512_unaligned_erms and __memset_avx512_unaligned.
	(memset): Removed.
	(__memset_chk): Likewise.
	(MEMSET_SYMBOL): New.
	(libc_hidden_builtin_def): Replace __memset_sse2 with
	__memset_sse2_unaligned.
	* sysdeps/x86_64/multiarch/memset_chk.S (__memset_chk): Replace
	__memset_chk_sse2 and __memset_chk_avx2 with
	__memset_chk_sse2_unaligned and __memset_chk_avx2_unaligned_erms.
Use __memset_chk_sse2_unaligned_erms or __memset_chk_avx2_unaligned_erms if processor has ERMS. Support __memset_chk_avx512_unaligned_erms and __memset_chk_avx512_unaligned.
Diffstat (limited to 'sysdeps/x86_64/multiarch/memset_chk.S')
-rw-r--r-- | sysdeps/x86_64/multiarch/memset_chk.S | 20 |
1 file changed, 16 insertions(+), 4 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/memset_chk.S b/sysdeps/x86_64/multiarch/memset_chk.S
index 9a7b270274..8517cfc073 100644
--- a/sysdeps/x86_64/multiarch/memset_chk.S
+++ b/sysdeps/x86_64/multiarch/memset_chk.S
@@ -26,16 +26,28 @@ ENTRY(__memset_chk)
 	.type	__memset_chk, @gnu_indirect_function
 	LOAD_RTLD_GLOBAL_RO_RDX
-	leaq	__memset_chk_sse2(%rip), %rax
+	lea	__memset_chk_sse2_unaligned_erms(%rip), %RAX_LP
+	HAS_CPU_FEATURE (ERMS)
+	jnz	1f
+	lea	__memset_chk_sse2_unaligned(%rip), %RAX_LP
+1:
 	HAS_ARCH_FEATURE (AVX2_Usable)
 	jz	2f
-	leaq	__memset_chk_avx2(%rip), %rax
+	lea	__memset_chk_avx2_unaligned_erms(%rip), %RAX_LP
+	HAS_CPU_FEATURE (ERMS)
+	jnz	L(AVX512F)
+	lea	__memset_chk_avx2_unaligned(%rip), %RAX_LP
+L(AVX512F):
 #ifdef HAVE_AVX512_ASM_SUPPORT
 	HAS_ARCH_FEATURE (AVX512F_Usable)
 	jz	2f
+	lea	__memset_chk_avx512_no_vzeroupper(%rip), %RAX_LP
 	HAS_ARCH_FEATURE (Prefer_No_VZEROUPPER)
-	jz	2f
-	leaq	__memset_chk_avx512_no_vzeroupper(%rip), %rax
+	jnz	2f
+	lea	__memset_chk_avx512_unaligned_erms(%rip), %RAX_LP
+	HAS_CPU_FEATURE (ERMS)
+	jnz	2f
+	lea	__memset_chk_avx512_unaligned(%rip), %RAX_LP
 #endif
 2:	ret
 END(__memset_chk)