author     H.J. Lu <hjl.tools@gmail.com>   2016-04-03 14:32:20 -0700
committer  H.J. Lu <hjl.tools@gmail.com>   2016-04-03 14:35:38 -0700
commit     5cd7af016d8587ff53b20ba259746f97edbddbf7 (patch)
tree       97e7bf8c54c53bcf58b97649edfdf2db4dff4362 /sysdeps/x86_64
parent     ea2785e96fa503f3a2b5dd9f3a6ca65622b3c5f2 (diff)
Don't put SSE2/AVX/AVX512 memmove/memset in ld.so
Since the memmove and memset used in ld.so are not selected via IFUNC,
there is no need to assemble the SSE2, AVX and AVX512 variants of
memmove and memset into ld.so.  Guard each implementation with
IS_IN (libc) so that it is built for libc only (a sketch of the
mechanism follows the ChangeLog entries below).

	* sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S: Skip
	if not in libc.
	* sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S:
	Likewise.
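
For reference, a minimal sketch of the IS_IN mechanism.  The macro
definitions below follow glibc's include/libc-symbols.h, but treat the
exact spellings as assumptions of this note rather than part of the
patch:

    /* glibc compiles each object once per target module and passes
       -DMODULE_NAME=libc (or rtld for ld.so, etc.) on the compiler
       command line; libc-symbols.h reduces IS_IN to an integer
       comparison against that module.  */
    #define PASTE_NAME1(a, b) a##b
    #define PASTE_NAME(a, b)  PASTE_NAME1 (a, b)
    #define IN_MODULE         PASTE_NAME (MODULE_, MODULE_NAME)
    #define IS_IN(lib)        (IN_MODULE == MODULE_##lib)

    /* When assembling for ld.so (MODULE_NAME=rtld), IS_IN (libc)
       evaluates to 0 and the preprocessor drops the guarded bodies,
       so no SSE2/AVX/AVX512 memmove or memset code lands in ld.so;
       ld.so keeps using the generic, non-multiarch versions.  */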
Diffstat (limited to 'sysdeps/x86_64')
 sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S    | 16
 sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S |  2
 sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S   | 16
 sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S    | 18
 sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S  |  2
 sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S    | 18
 6 files changed, 40 insertions(+), 32 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
index 3a72c7eafd..44711c37ca 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE	32
-#define VEC(i)		ymm##i
-#define VMOVU		vmovdqu
-#define VMOVA		vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE	32
+# define VEC(i)		ymm##i
+# define VMOVU		vmovdqu
+# define VMOVA		vmovdqa
 
-#define SECTION(p)		p##.avx
-#define MEMMOVE_SYMBOL(p,s)	p##_avx_##s
+# define SECTION(p)		p##.avx
+# define MEMMOVE_SYMBOL(p,s)	p##_avx_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
index 38358fa37c..c2c52937bf 100644
--- a/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE	64
 # define VEC(i)		zmm##i
 # define VMOVU		vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
index 52b9ae08fc..85214fe725 100644
--- a/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-sse2-unaligned-erms.S
@@ -1,9 +1,11 @@
-#define VEC_SIZE	16
-#define VEC(i)		xmm##i
-#define VMOVU		movdqu
-#define VMOVA		movdqa
+#if IS_IN (libc)
+# define VEC_SIZE	16
+# define VEC(i)		xmm##i
+# define VMOVU		movdqu
+# define VMOVA		movdqa
 
-#define SECTION(p)		p
-#define MEMMOVE_SYMBOL(p,s)	p##_sse2_##s
+# define SECTION(p)		p
+# define MEMMOVE_SYMBOL(p,s)	p##_sse2_##s
 
-#include "memmove-vec-unaligned-erms.S"
+# include "memmove-vec-unaligned-erms.S"
+#endif
diff --git a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
index e0dc56512e..79975e0825 100644
--- a/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx2-unaligned-erms.S
@@ -1,14 +1,16 @@
-#define VEC_SIZE	32
-#define VEC(i)		ymm##i
-#define VMOVU		vmovdqu
-#define VMOVA		vmovdqa
+#if IS_IN (libc)
+# define VEC_SIZE	32
+# define VEC(i)		ymm##i
+# define VMOVU		vmovdqu
+# define VMOVA		vmovdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   vmovd d, %xmm0; \
   movq r, %rax; \
   vpbroadcastb %xmm0, %ymm0
 
-#define SECTION(p)		p##.avx
-#define MEMSET_SYMBOL(p,s)	p##_avx2_##s
+# define SECTION(p)		p##.avx
+# define MEMSET_SYMBOL(p,s)	p##_avx2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
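(An aside on the memset hunks: the VDUP_TO_VEC0_AND_SET_RETURN
sequences the guard now encloses implement the usual byte-broadcast
idiom, restated below as comments; this is a reading of the code
above, not new code in the patch.)

    /* AVX2: vpbroadcastb copies byte 0 of %xmm0 to all 32 bytes of
       %ymm0 in one instruction.  SSE2 (the memset-sse2 file below)
       has no byte broadcast, so it widens stepwise: punpcklbw doubles
       the byte to 16 bits, punpcklwd to 32 bits, and pshufd $0
       replicates the low dword across %xmm0.  In both cases
       movq r, %rax preloads the return value, since memset returns
       its destination.  */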
diff --git a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
index 72f4095831..f1b3cb23d3 100644
--- a/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-avx512-unaligned-erms.S
@@ -1,4 +1,4 @@
-#ifdef HAVE_AVX512_ASM_SUPPORT
+#if defined HAVE_AVX512_ASM_SUPPORT && IS_IN (libc)
 # define VEC_SIZE	64
 # define VEC(i)		zmm##i
 # define VMOVU		vmovdqu64
diff --git a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
index 437a858dab..2deba42c16 100644
--- a/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memset-sse2-unaligned-erms.S
@@ -1,16 +1,18 @@
-#define VEC_SIZE	16
-#define VEC(i)		xmm##i
-#define VMOVU		movdqu
-#define VMOVA		movdqa
+#if IS_IN (libc)
+# define VEC_SIZE	16
+# define VEC(i)		xmm##i
+# define VMOVU		movdqu
+# define VMOVA		movdqa
 
-#define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
+# define VDUP_TO_VEC0_AND_SET_RETURN(d, r) \
   movd d, %xmm0; \
   movq r, %rax; \
   punpcklbw %xmm0, %xmm0; \
   punpcklwd %xmm0, %xmm0; \
   pshufd $0, %xmm0, %xmm0
 
-#define SECTION(p)		p
-#define MEMSET_SYMBOL(p,s)	p##_sse2_##s
+# define SECTION(p)		p
+# define MEMSET_SYMBOL(p,s)	p##_sse2_##s
 
-#include "memset-vec-unaligned-erms.S"
+# include "memset-vec-unaligned-erms.S"
+#endif
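A closing note on the token-pasting macros the patch leaves unchanged:
the shared templates expand SECTION and MEMMOVE_SYMBOL/MEMSET_SYMBOL
per file, so each variant lands in its own section under its own
symbol.  The expansion below is illustrative, derived only from the
macro definitions visible in the hunks:

    /* With the AVX definitions, SECTION (.text) pastes to .text.avx,
       and MEMMOVE_SYMBOL (__memmove, unaligned_erms) pastes to
       __memmove_avx_unaligned_erms; the SSE2 file's p##_sse2_##s
       yields __memmove_sse2_unaligned_erms in plain .text.  */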