author     H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:35:18 -0800
committer  H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:35:34 -0800
commit     c7c54f65b080affb87a1513dee449c8ad6143c8b
tree       7843555220fda4dbbe023b2035a8de081810128a
parent     ee915088a0231cd421054dbd8abab7aadf331153
x86-64 strncpy: Properly handle the length parameter [BZ# 24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a
64-bit register with non-zero upper 32 bits.  The string/memory
functions written in assembly must therefore either use only the lower
32 bits of a 64-bit register as the length, or clear the upper 32 bits
before using the full 64-bit register as the length.
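
How can the upper 32 bits end up non-zero?  A minimal, hypothetical
sketch (this is not the new tst-size_t-strncpy.c test; the
strncpy_u64_t cast and the bogus_len value are illustration-only
assumptions) calls strncpy through a prototype that passes a full
64-bit length, so the assembly implementation sees junk above bit 31:

/* Hypothetical, x32-only sketch (not the actual glibc test):
   demonstrate a length register whose upper 32 bits are junk.  */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Assumed prototype that passes a full 64-bit length in %rdx.  */
typedef char *(*strncpy_u64_t) (char *, const char *, uint64_t);

int
main (void)
{
#if defined __x86_64__ && defined __ILP32__	/* x32 only.  */
  char src[32] = "hello";
  char dst[32];

  /* The low 32 bits encode the real length; the upper 32 bits are
     junk that a correct 32-bit-size_t implementation must ignore.  */
  uint64_t bogus_len = ((uint64_t) 0xdeadbeef << 32) | sizeof dst;

  strncpy_u64_t fn = (strncpy_u64_t) strncpy;
  fn (dst, src, bogus_len);	/* Fixed assembly reads only %edx.  */

  puts (dst);			/* Prints "hello" once fixed.  */
#endif
  return 0;
}

Before the fix, an implementation that used all of %rdx as the length
could treat the junk upper bits as part of the count and run far past
the destination buffer.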

This patch fixes strncpy for x32.  Tested on x86-64 and x32.  On
x86-64, libc.so is identical with and without the fix.

	[BZ# 24097]
	CVE-2019-6488
	* sysdeps/x86_64/multiarch/strcpy-avx2.S: Use RDX_LP for length.
	* sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S: Likewise.
	* sysdeps/x86_64/multiarch/strcpy-ssse3.S: Likewise.
	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-strncpy.
	* sysdeps/x86_64/x32/tst-size_t-strncpy.c: New file.
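
The Makefile hook is a one-line addition to the tests variable; a
minimal sketch, assuming sysdeps/x86_64/x32/Makefile gates its string
tests behind the usual subdir conditional (the exact placement is an
assumption):

ifeq ($(subdir),string)
tests += tst-size_t-strncpy
endif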
Diffstat (limited to 'sysdeps/x86_64/multiarch'):
 sysdeps/x86_64/multiarch/strcpy-avx2.S           | 4 ++--
 sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S | 4 ++--
 sysdeps/x86_64/multiarch/strcpy-ssse3.S          | 6 +++---
 3 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/strcpy-avx2.S b/sysdeps/x86_64/multiarch/strcpy-avx2.S
index 81677f9060..e72a0cad7d 100644
--- a/sysdeps/x86_64/multiarch/strcpy-avx2.S
+++ b/sysdeps/x86_64/multiarch/strcpy-avx2.S
@@ -49,8 +49,8 @@
 	.section .text.avx,"ax",@progbits
 ENTRY (STRCPY)
 #  ifdef USE_AS_STRNCPY
-	mov	%rdx, %r8
-	test	%r8, %r8
+	mov	%RDX_LP, %R8_LP
+	test	%R8_LP, %R8_LP
 	jz	L(ExitZero)
 #  endif
 	mov	%rsi, %rcx
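
The RDX_LP and R8_LP names used above are glibc's "long or pointer"
register macros.  A rough sketch of how they expand (the real
definitions live in the x86-64 and x32 sysdep headers; this shape is an
approximation):

#ifdef __LP64__			/* Plain x86-64: 64-bit size_t.  */
# define RDX_LP	rdx
# define R8_LP	r8
#else				/* x32: 32-bit size_t.  */
# define RDX_LP	edx
# define R8_LP	r8d
#endif

On x32 the mov above thus assembles as `mov %edx, %r8d`; a 32-bit mov
zero-extends into the full 64-bit register, so later 64-bit uses of
%r8 as the length are safe.  On x86-64 the expansion is unchanged,
which is why libc.so is byte-identical with and without the fix.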
diff --git a/sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S b/sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S
index b7c79976ea..0d6914e113 100644
--- a/sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S
+++ b/sysdeps/x86_64/multiarch/strcpy-sse2-unaligned.S
@@ -40,8 +40,8 @@
 .text
 ENTRY (STRCPY)
 #  ifdef USE_AS_STRNCPY
-	mov	%rdx, %r8
-	test	%r8, %r8
+	mov	%RDX_LP, %R8_LP
+	test	%R8_LP, %R8_LP
 	jz	L(ExitZero)
 #  endif
 	mov	%rsi, %rcx
diff --git a/sysdeps/x86_64/multiarch/strcpy-ssse3.S b/sysdeps/x86_64/multiarch/strcpy-ssse3.S
index ec53f2a632..39e6ad762c 100644
--- a/sysdeps/x86_64/multiarch/strcpy-ssse3.S
+++ b/sysdeps/x86_64/multiarch/strcpy-ssse3.S
@@ -31,13 +31,13 @@ ENTRY (STRCPY)
 
 	mov	%rsi, %rcx
 #  ifdef USE_AS_STRNCPY
-	mov	%rdx, %r8
+	mov	%RDX_LP, %R8_LP
 #  endif
 	mov	%rdi, %rdx
 #  ifdef USE_AS_STRNCPY
-	test	%r8, %r8
+	test	%R8_LP, %R8_LP
 	jz	L(Exit0)
-	cmp	$8, %r8
+	cmp	$8, %R8_LP
 	jbe	L(StrncpyExit8Bytes)
 # endif
 	cmpb	$0, (%rcx)