about summary refs log tree commit diff
path: root/sysdeps/x86_64/dl-trampoline.S
diff options
context:
space:
mode:
authorUlrich Drepper <drepper@redhat.com>2009-07-15 17:59:14 -0700
committerUlrich Drepper <drepper@redhat.com>2009-07-15 17:59:14 -0700
commitca419225a3c4f9f341eddf582b201211d1bf2aec (patch)
tree796509510a38c030f19933b668324b6809f736ea /sysdeps/x86_64/dl-trampoline.S
parent47fc9b710bcadb4196f8ef71813d6724d954fcb2 (diff)
downloadglibc-ca419225a3c4f9f341eddf582b201211d1bf2aec.tar.gz
glibc-ca419225a3c4f9f341eddf582b201211d1bf2aec.tar.xz
glibc-ca419225a3c4f9f341eddf582b201211d1bf2aec.zip
Fix thinko in AVX audit patch.
Don't use AVX instructions too often.
Diffstat (limited to 'sysdeps/x86_64/dl-trampoline.S')
-rw-r--r--sysdeps/x86_64/dl-trampoline.S24
1 file changed, 4 insertions, 20 deletions
diff --git a/sysdeps/x86_64/dl-trampoline.S b/sysdeps/x86_64/dl-trampoline.S
index d09001bb58..7f20491130 100644
--- a/sysdeps/x86_64/dl-trampoline.S
+++ b/sysdeps/x86_64/dl-trampoline.S
@@ -249,17 +249,7 @@ L(no_avx1):
 	jmp	1f
 
 L(no_avx2):
-	vmovdqa		    (LR_XMM_OFFSET)(%rsp), %xmm0
-	vmovdqa	 (LR_XMM_OFFSET + XMM_SIZE)(%rsp), %xmm1
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*2)(%rsp), %xmm2
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*3)(%rsp), %xmm3
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*4)(%rsp), %xmm4
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*5)(%rsp), %xmm5
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*6)(%rsp), %xmm6
-	vmovdqa (LR_XMM_OFFSET + XMM_SIZE*7)(%rsp), %xmm7
-
-1:
-# else
+# endif
 	movaps		    (LR_XMM_OFFSET)(%rsp), %xmm0
 	movaps	 (LR_XMM_OFFSET + XMM_SIZE)(%rsp), %xmm1
 	movaps (LR_XMM_OFFSET + XMM_SIZE*2)(%rsp), %xmm2
@@ -268,9 +258,8 @@ L(no_avx2):
 	movaps (LR_XMM_OFFSET + XMM_SIZE*5)(%rsp), %xmm5
 	movaps (LR_XMM_OFFSET + XMM_SIZE*6)(%rsp), %xmm6
 	movaps (LR_XMM_OFFSET + XMM_SIZE*7)(%rsp), %xmm7
-# endif
 
-	movq 16(%rbx), %r10	# Anything in framesize?
+1:	movq 16(%rbx), %r10	# Anything in framesize?
 	testq %r10, %r10
 	jns 3f
 
@@ -390,16 +379,11 @@ L(no_avx3):
 	jmp 1f
 
 L(no_avx4):
-	vmovdqa LRV_XMM0_OFFSET(%rsp), %xmm0
-	vmovdqa LRV_XMM1_OFFSET(%rsp), %xmm1
-
-1:
-# else
+# endif
 	movaps LRV_XMM0_OFFSET(%rsp), %xmm0
 	movaps LRV_XMM1_OFFSET(%rsp), %xmm1
-# endif
 
-	fldt LRV_ST1_OFFSET(%rsp)
+1:	fldt LRV_ST1_OFFSET(%rsp)
 	fldt LRV_ST0_OFFSET(%rsp)
 
 	movq %rbx, %rsp