author	H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:27:25 -0800
committer	H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:27:36 -0800
commit	231c56760c1e2ded21ad96bbb860b1f08c556c7a
tree	4aca6b1947a0188731cbf37e27b307cc8603b1ef
parent	b304fc201d2f6baf52ea790df8643e99772243cd
x86-64 memcpy: Properly handle the length parameter [BZ #24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a
64-bit register with non-zero upper 32 bits.  String/memory functions
written in assembly must therefore either use only the lower 32 bits
of a 64-bit register as the length, or clear the upper 32 bits before
using the full 64-bit register as the length.
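
As a sketch (not part of the patch), the two strategies look like this
in AT&T syntax, with the length in RDX as in the functions below:

	/* Strategy 1: only ever touch the low 32 bits of the length.  */
	cmp	%edx, %ecx	/* 32-bit compare ignores bits 32-63.  */

	/* Strategy 2: zero-extend once, then use the full register;
	   a 32-bit mov clears bits 32-63 of its destination.  */
	mov	%edx, %edx	/* RDX now holds the zero-extended length.  */
	cmp	%rdx, %rcx	/* Safe 64-bit compare.  */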

This patch fixes memcpy for x32.  Tested on x86-64 and x32.  On
x86-64, libc.so is the same with and without the fix.

	[BZ #24097]
	CVE-2019-6488
	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Use RDX_LP for
	length.  Clear the upper 32 bits of RDX register.
	* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Likewise.
	* sysdeps/x86_64/multiarch/memmove-avx512-no-vzeroupper.S:
	Likewise.
	* sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:
	Likewise.
	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memcpy.
	* sysdeps/x86_64/x32/tst-size_t-memcpy.c: New file.
Diffstat (limited to 'sysdeps/x86_64/multiarch/memcpy-ssse3.S')
 sysdeps/x86_64/multiarch/memcpy-ssse3.S | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)
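
For context, the *_LP register macros used in this diff come from
glibc's sysdeps/x86_64/sysdep.h and expand to the register width that
matches size_t.  A rough sketch of that definition (not the verbatim
header):

#ifdef __ILP32__
/* x32: size_t is 32 bits, so use the 32-bit sub-registers.  */
# define RAX_LP	eax
# define RCX_LP	ecx
# define RDX_LP	edx
# define RDI_LP	edi
#else
/* LP64: size_t is 64 bits, so use the full 64-bit registers.  */
# define RAX_LP	rax
# define RCX_LP	rcx
# define RDX_LP	rdx
# define RDI_LP	rdi
#endif
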
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3.S b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
index 7d74dbd2f5..8f68315418 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
@@ -45,28 +45,33 @@
 	.section .text.ssse3,"ax",@progbits
 #if !defined USE_AS_MEMPCPY && !defined USE_AS_MEMMOVE
 ENTRY (MEMPCPY_CHK)
-	cmpq	%rdx, %rcx
+	cmp	%RDX_LP, %RCX_LP
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (MEMPCPY_CHK)
 
 ENTRY (MEMPCPY)
-	movq	%rdi, %rax
-	addq	%rdx, %rax
+	mov	%RDI_LP, %RAX_LP
+	add	%RDX_LP, %RAX_LP
 	jmp	L(start)
 END (MEMPCPY)
 #endif
 
 #if !defined USE_AS_BCOPY
 ENTRY (MEMCPY_CHK)
-	cmpq	%rdx, %rcx
+	cmp	%RDX_LP, %RCX_LP
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (MEMCPY_CHK)
 #endif
 
 ENTRY (MEMCPY)
-	mov	%rdi, %rax
+	mov	%RDI_LP, %RAX_LP
 #ifdef USE_AS_MEMPCPY
-	add	%rdx, %rax
+	add	%RDX_LP, %RAX_LP
+#endif
+
+#ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	mov	%edx, %edx
 #endif
 
 #ifdef USE_AS_MEMMOVE