about summary refs log tree commit diff
path: root/sysdeps/x86_64/memchr.S
diff options
context:
space:
mode:
author: H.J. Lu <hjl.tools@gmail.com> 2019-02-01 12:17:09 -0800
committer: H.J. Lu <hjl.tools@gmail.com> 2019-02-01 15:32:53 -0800
commitbff8346b0184b15fbb80863112133f48a7bd62a9 (patch)
treeb0dcd6025eba0540bfb2019ab2a6b116a1bc8cc3 /sysdeps/x86_64/memchr.S
parent7ab39c6a3cd4a61a2be3d2e6a2a56f4dccca9750 (diff)
downloadglibc-bff8346b0184b15fbb80863112133f48a7bd62a9.tar.gz
glibc-bff8346b0184b15fbb80863112133f48a7bd62a9.tar.xz
glibc-bff8346b0184b15fbb80863112133f48a7bd62a9.zip
x86-64 memchr/wmemchr: Properly handle the length parameter [BZ #24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a
64-bit register with the non-zero upper 32 bits.  The string/memory
functions written in assembly can only use the lower 32 bits of a
64-bit register as length or must clear the upper 32 bits before using
the full 64-bit register for length.

This patch fixes memchr/wmemchr for x32.  Tested on x86-64 and x32.  On
x86-64, libc.so is the same with and without the fix.

	[BZ #24097]
	CVE-2019-6488
	* sysdeps/x86_64/memchr.S: Use RDX_LP for length.  Clear the
	upper 32 bits of RDX register.
	* sysdeps/x86_64/multiarch/memchr-avx2.S: Likewise.
	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memchr and
	tst-size_t-wmemchr.
	* sysdeps/x86_64/x32/test-size_t.h: New file.
	* sysdeps/x86_64/x32/tst-size_t-memchr.c: Likewise.
	* sysdeps/x86_64/x32/tst-size_t-wmemchr.c: Likewise.

(cherry picked from commit 97700a34f36721b11a754cf37a1cc40695ece1fd)
Diffstat (limited to 'sysdeps/x86_64/memchr.S')
-rw-r--r--sysdeps/x86_64/memchr.S10
1 files changed, 7 insertions, 3 deletions
diff --git a/sysdeps/x86_64/memchr.S b/sysdeps/x86_64/memchr.S
index f5f05f6c8c..fd20f64f9b 100644
--- a/sysdeps/x86_64/memchr.S
+++ b/sysdeps/x86_64/memchr.S
@@ -34,12 +34,16 @@ ENTRY(MEMCHR)
 	mov	%edi, %ecx
 
 #ifdef USE_AS_WMEMCHR
-	test	%rdx, %rdx
+	test	%RDX_LP, %RDX_LP
 	jz	L(return_null)
-	shl	$2, %rdx
+	shl	$2, %RDX_LP
 #else
+# ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	movl	%edx, %edx
+# endif
 	punpcklbw %xmm1, %xmm1
-	test	%rdx, %rdx
+	test	%RDX_LP, %RDX_LP
 	jz	L(return_null)
 	punpcklbw %xmm1, %xmm1
 #endif