author | H.J. Lu <hjl.tools@gmail.com> | 2016-06-30 07:57:07 -0700
committer | H.J. Lu <hjl.tools@gmail.com> | 2016-06-30 07:58:11 -0700
commit | 13efa86ece61bf84daca50cab30db1b0902fe2db (patch)
tree | e6ed4e21bb720ba21d069d61064a057be6f999db /sysdeps/x86
parent | 73fb56a4d51fd4437e4cde6dd3c8077a610f88a8 (diff)
Check Prefer_ERMS in memmove/memcpy/mempcpy/memset
Although the Enhanced REP MOVSB/STOSB (ERMS) implementations of memmove, memcpy, mempcpy and memset aren't used by the current processors, this patch adds a Prefer_ERMS check in memmove, memcpy, mempcpy and memset so that they can be used in the future.

	* sysdeps/x86/cpu-features.h (bit_arch_Prefer_ERMS): New.
	(index_arch_Prefer_ERMS): Likewise.
	* sysdeps/x86_64/multiarch/memcpy.S (__new_memcpy): Return
	__memcpy_erms for Prefer_ERMS.
	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
	(__memmove_erms): Enabled for libc.a.
	* sysdeps/x86_64/multiarch/memmove.S (__libc_memmove): Return
	__memmove_erms for Prefer_ERMS.
	* sysdeps/x86_64/multiarch/mempcpy.S (__mempcpy): Return
	__mempcpy_erms for Prefer_ERMS.
	* sysdeps/x86_64/multiarch/memset.S (memset): Return
	__memset_erms for Prefer_ERMS.
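The selector change itself lives in the assembly IFUNC resolvers, which fall outside the diffstat shown below. As a rough, self-contained C sketch of the idea (not glibc's actual code; the function names, the CPUID probe and the inline-asm copy are illustrative assumptions), an ERMS memcpy is a single "rep movsb", and a dispatcher picks it when the CPU advertises the ERMS bit (CPUID leaf 7, EBX bit 9); glibc's resolvers key off the Prefer_ERMS feature bit instead:

/* Illustrative sketch only: a "rep movsb" copy routine and a crude
   dispatcher.  glibc's real resolvers are written in assembly and test
   the Prefer_ERMS bit in cpu-features rather than raw CPUID.  */
#include <cpuid.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

static void *
memcpy_erms (void *dst, const void *src, size_t n)
{
  void *ret = dst;
  /* Enhanced REP MOVSB: the hardware chooses the copy strategy.  */
  __asm__ volatile ("rep movsb"
                    : "+D" (dst), "+S" (src), "+c" (n)
                    :
                    : "memory");
  return ret;
}

static int
cpu_has_erms (void)
{
  unsigned int eax, ebx, ecx, edx;
  /* ERMS is CPUID.(EAX=7,ECX=0):EBX bit 9.  */
  if (!__get_cpuid_count (7, 0, &eax, &ebx, &ecx, &edx))
    return 0;
  return (ebx >> 9) & 1;
}

int
main (void)
{
  char src[] = "hello, erms";
  char dst[sizeof src];
  void *(*copy) (void *, const void *, size_t)
    = cpu_has_erms () ? memcpy_erms : memcpy;

  copy (dst, src, sizeof src);
  printf ("copied \"%s\" (erms=%d)\n", dst, cpu_has_erms ());
  return 0;
}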
Diffstat (limited to 'sysdeps/x86')
-rw-r--r-- | sysdeps/x86/cpu-features.h | 3
1 file changed, 3 insertions, 0 deletions
diff --git a/sysdeps/x86/cpu-features.h b/sysdeps/x86/cpu-features.h
index 2bd93713a1..97ffe765f4 100644
--- a/sysdeps/x86/cpu-features.h
+++ b/sysdeps/x86/cpu-features.h
@@ -36,6 +36,7 @@
 #define bit_arch_Prefer_MAP_32BIT_EXEC (1 << 16)
 #define bit_arch_Prefer_No_VZEROUPPER (1 << 17)
 #define bit_arch_Fast_Unaligned_Copy (1 << 18)
+#define bit_arch_Prefer_ERMS (1 << 19)
 
 /* CPUID Feature flags. */
@@ -105,6 +106,7 @@
 # define index_arch_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1*FEATURE_SIZE
 # define index_arch_Prefer_No_VZEROUPPER FEATURE_INDEX_1*FEATURE_SIZE
 # define index_arch_Fast_Unaligned_Copy FEATURE_INDEX_1*FEATURE_SIZE
+# define index_arch_Prefer_ERMS FEATURE_INDEX_1*FEATURE_SIZE
 
 # if defined (_LIBC) && !IS_IN (nonlib)
@@ -274,6 +276,7 @@ extern const struct cpu_features *__get_cpu_features (void)
 # define index_arch_Prefer_MAP_32BIT_EXEC FEATURE_INDEX_1
 # define index_arch_Prefer_No_VZEROUPPER FEATURE_INDEX_1
 # define index_arch_Fast_Unaligned_Copy FEATURE_INDEX_1
+# define index_arch_Prefer_ERMS FEATURE_INDEX_1
 
 #endif /* !__ASSEMBLER__ */
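For context, the bit/index pair added above is consumed by the header's arch-feature test macros (the CPU_FEATURES_ARCH_P pattern): the index selects a word in the feature array and the bit mask tests Prefer_ERMS within it. A simplified, self-contained mirror of that test, with the surrounding structure reduced to a stub (illustrative only, not the real header), might look like this:

#include <stdio.h>

/* Stub standing in for the real cpu_features layout.  */
#define FEATURE_INDEX_1        0
#define bit_arch_Prefer_ERMS   (1 << 19)
#define index_arch_Prefer_ERMS FEATURE_INDEX_1

struct cpu_features_stub
{
  unsigned int feature[1];
};

/* Same shape as the header's arch-feature test: the index picks the
   word, the bit mask tests Prefer_ERMS inside it.  */
static int
prefer_erms_p (const struct cpu_features_stub *cf)
{
  return (cf->feature[index_arch_Prefer_ERMS] & bit_arch_Prefer_ERMS) != 0;
}

int
main (void)
{
  struct cpu_features_stub cf = { { bit_arch_Prefer_ERMS } };
  printf ("Prefer_ERMS set: %d\n", prefer_erms_p (&cf));
  return 0;
}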