| author | Ulrich Drepper <drepper@gmail.com> | 2012-01-26 09:45:54 -0500 |
|---|---|---|
| committer | Ulrich Drepper <drepper@gmail.com> | 2012-01-26 09:45:54 -0500 |
| commit | 08cf777f9e7f6d826658a99c7d77a359f73a45bf | |
| tree | 89cdc5e4339c060b4e0ccaab79a2924c4989ab9c /sysdeps/x86_64/multiarch/init-arch.h | |
| parent | afc5ed09cbce5d6fd48b3a8c5ec427b31f996880 | |
Really fix AVX tests
There is no problem with strcmp: it doesn't use the YMM registers. The math routines might, though, since gcc may generate such code. Introduce bit_YMM_Usable and use it in the math routines.
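For context, "usable" here means more than the CPUID AVX bit: the kernel must also have enabled YMM state saving in XCR0, or YMM register contents would not survive a context switch. A check along these lines establishes that (a minimal standalone sketch of the idea; the macro and function names are illustrative, not glibc's actual init-arch.c code):

```c
/* Sketch: AVX in CPUID alone is not enough; the OS must also enable
   YMM state saving via XCR0.  Names here are illustrative only.  */
#include <cpuid.h>   /* GCC/Clang helper for the CPUID instruction.  */

#define bit_cpu_OSXSAVE (1 << 27)  /* CPUID.1:ECX - OS uses XSAVE.  */
#define bit_cpu_AVX     (1 << 28)  /* CPUID.1:ECX - CPU supports AVX.  */

static int
ymm_usable (void)
{
  unsigned int eax, ebx, ecx, edx;

  if (!__get_cpuid (1, &eax, &ebx, &ecx, &edx))
    return 0;

  /* Both bits must be set before XGETBV may be executed at all.  */
  if ((ecx & (bit_cpu_AVX | bit_cpu_OSXSAVE))
      != (bit_cpu_AVX | bit_cpu_OSXSAVE))
    return 0;

  /* XGETBV with ECX=0 reads XCR0; bits 1 (SSE state) and 2 (AVX
     state) must both be enabled by the kernel for the YMM registers
     to be saved and restored across context switches.  */
  unsigned int xcr0_lo, xcr0_hi;
  __asm__ ("xgetbv" : "=a" (xcr0_lo), "=d" (xcr0_hi) : "c" (0));
  return (xcr0_lo & 6) == 6;
}
```

Only when both checks pass is it safe for math routines compiled with AVX code generation to touch the YMM registers.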
Diffstat (limited to 'sysdeps/x86_64/multiarch/init-arch.h')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | sysdeps/x86_64/multiarch/init-arch.h | 26 |

1 file changed, 13 insertions(+), 13 deletions(-)
```diff
diff --git a/sysdeps/x86_64/multiarch/init-arch.h b/sysdeps/x86_64/multiarch/init-arch.h
index 408e5aeb89..2dc75ab37b 100644
--- a/sysdeps/x86_64/multiarch/init-arch.h
+++ b/sysdeps/x86_64/multiarch/init-arch.h
@@ -22,6 +22,7 @@
 #define bit_Prefer_SSE_for_memop	(1 << 3)
 #define bit_Fast_Unaligned_Load	(1 << 4)
 #define bit_Prefer_PMINUB_for_stringop	(1 << 5)
+#define bit_YMM_Usable			(1 << 6)
 
 #define bit_SSE2	(1 << 26)
 #define bit_SSSE3	(1 << 9)
@@ -49,6 +50,7 @@
 # define index_Prefer_SSE_for_memop	FEATURE_INDEX_1*FEATURE_SIZE
 # define index_Fast_Unaligned_Load	FEATURE_INDEX_1*FEATURE_SIZE
 # define index_Prefer_PMINUB_for_stringop FEATURE_INDEX_1*FEATURE_SIZE
+# define index_YMM_Usable		FEATURE_INDEX_1*FEATURE_SIZE
 
 #else	/* __ASSEMBLER__ */
 
@@ -93,7 +95,7 @@ extern struct cpu_features
 
 extern void __init_cpu_features (void) attribute_hidden;
 
-#define INIT_ARCH()\
+# define INIT_ARCH() \
   do							\
     if (__cpu_features.kind == arch_kind_unknown)	\
       __init_cpu_features ();				\
@@ -126,23 +128,21 @@ extern const struct cpu_features *__get_cpu_features (void)
 # define index_Slow_BSF			FEATURE_INDEX_1
 # define index_Prefer_SSE_for_memop	FEATURE_INDEX_1
 # define index_Fast_Unaligned_Load	FEATURE_INDEX_1
+# define index_YMM_Usable		FEATURE_INDEX_1
 
-#define HAS_ARCH_FEATURE(idx, bit) \
-  ((__get_cpu_features ()->feature[idx] & (bit)) != 0)
+# define HAS_ARCH_FEATURE(name) \
+  ((__get_cpu_features ()->feature[index_##name] & (bit_##name)) != 0)
 
-#define HAS_FAST_REP_STRING \
-  HAS_ARCH_FEATURE (index_Fast_Rep_String, bit_Fast_Rep_String)
+# define HAS_FAST_REP_STRING	HAS_ARCH_FEATURE (Fast_Rep_String)
 
-#define HAS_FAST_COPY_BACKWARD \
-  HAS_ARCH_FEATURE (index_Fast_Copy_Backward, bit_Fast_Copy_Backward)
+# define HAS_FAST_COPY_BACKWARD	HAS_ARCH_FEATURE (Fast_Copy_Backward)
 
-#define HAS_SLOW_BSF \
-  HAS_ARCH_FEATURE (index_Slow_BSF, bit_Slow_BSF)
+# define HAS_SLOW_BSF		HAS_ARCH_FEATURE (Slow_BSF)
 
-#define HAS_PREFER_SSE_FOR_MEMOP \
-  HAS_ARCH_FEATURE (index_Prefer_SSE_for_memop, bit_Prefer_SSE_for_memop)
+# define HAS_PREFER_SSE_FOR_MEMOP HAS_ARCH_FEATURE (Prefer_SSE_for_memop)
 
-#define HAS_FAST_UNALIGNED_LOAD \
-  HAS_ARCH_FEATURE (index_Fast_Unaligned_Load, bit_Fast_Unaligned_Load)
+# define HAS_FAST_UNALIGNED_LOAD HAS_ARCH_FEATURE (Fast_Unaligned_Load)
+
+# define HAS_YMM_USABLE		HAS_ARCH_FEATURE (YMM_Usable)
 
 #endif	/* __ASSEMBLER__ */
```
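The heart of the cleanup is the new single-argument HAS_ARCH_FEATURE: token pasting derives both the index_* and bit_* names from one feature name, so the two can never get out of sync at a call site, and each HAS_* convenience macro shrinks to a single line. A hypothetical caller (assuming init-arch.h is included; __cos_avx and __cos_sse2 are illustrative names, not part of this patch) would dispatch like this:

```c
/* How the new single-argument macro expands:

     HAS_YMM_USABLE
     => HAS_ARCH_FEATURE (YMM_Usable)
     => ((__get_cpu_features ()->feature[index_YMM_Usable]
          & (bit_YMM_Usable)) != 0)

   Illustrative IFUNC-style resolver; __cos_avx and __cos_sse2 are
   hypothetical names, not part of this patch.  */
extern double __cos_avx (double);
extern double __cos_sse2 (double);

static double (*
resolve_cos (void)) (double)
{
  /* Dispatch on OS-supported AVX, not merely the CPUID AVX bit.  */
  return HAS_YMM_USABLE ? __cos_avx : __cos_sse2;
}
```

With this shape, adding a feature only requires defining its bit_*/index_* pair and one one-line HAS_* macro, exactly as the patch does for YMM_Usable.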