commit 97700a34f36721b11a754cf37a1cc40695ece1fd
Author:    H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:23:59 -0800
Committer: H.J. Lu <hjl.tools@gmail.com>	2019-01-21 11:24:13 -0800
Tree:      026802789792449adc9de3532ee8b10685086401
Parent:    6ca53a2453598804a2559a548a08424fca96434a
x86-64 memchr/wmemchr: Properly handle the length parameter [BZ# 24097]
On x32, the size_t parameter may be passed in the lower 32 bits of a
64-bit register with non-zero upper 32 bits.  The string/memory
functions written in assembly must either use only the lower 32 bits
of a 64-bit register as the length, or clear the upper 32 bits before
using the full 64-bit register as the length.

This patch fixes memchr/wmemchr for x32.  Tested on x86-64 and x32.  On
x86-64, libc.so is the same with and without the fix.
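
As a rough illustration of the failure mode (a hypothetical sketch, not
the tst-size_t-memchr.c test this patch adds; the inline-asm call and
the 0xdeadbeef fill pattern are invented for the example), an x32
caller may legally leave garbage in bits 32-63 of %rdx while the real
5-byte length sits in %edx.  An implementation that consumes the full
64-bit register then searches far past the intended length:

	#include <stdio.h>
	#include <string.h>

	int
	main (void)
	{
	#ifdef __ILP32__
	  /* 'w' occurs only past the first five bytes, so a correct memchr
	     called with length 5 must return NULL.  */
	  static const char buf[] = "hello, world";
	  const void *p = buf;
	  unsigned int c = 'w';
	  void *res;

	  /* Call memchr directly from inline asm so the junk written into
	     the upper half of %rdx survives up to the call, which the x32
	     ABI permits.  memchr follows the standard calling convention,
	     so listing the caller-saved registers as clobbers is enough.  */
	  asm volatile ("movabs $0xdeadbeef00000005, %%rdx\n\t"
			"call	memchr@PLT"
			: "=a" (res), "+D" (p), "+S" (c)
			:
			: "rdx", "rcx", "r8", "r9", "r10", "r11",
			  "xmm0", "xmm1", "xmm2", "xmm3", "xmm4",
			  "memory", "cc");

	  printf ("memchr (buf, 'w', 5) = %p, expected NULL\n", res);
	  return res != NULL;
	#else
	  /* The problem only exists for the x32 ABI.  */
	  return 0;
	#endif
	}

Built with -mx32 against an unfixed libc, the buggy AVX2 memchr would
likely return &buf[7] (or read out of bounds); with the fix it returns
NULL.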

	[BZ# 24097]
	CVE-2019-6488
	* sysdeps/x86_64/memchr.S: Use RDX_LP for length.  Clear the
	upper 32 bits of RDX register.
	* sysdeps/x86_64/multiarch/memchr-avx2.S: Likewise.
	* sysdeps/x86_64/x32/Makefile (tests): Add tst-size_t-memchr and
	tst-size_t-wmemchr.
	* sysdeps/x86_64/x32/test-size_t.h: New file.
	* sysdeps/x86_64/x32/tst-size_t-memchr.c: Likewise.
	* sysdeps/x86_64/x32/tst-size_t-wmemchr.c: Likewise.
Diffstat (limited to 'sysdeps/x86_64/multiarch/memchr-avx2.S')
 sysdeps/x86_64/multiarch/memchr-avx2.S | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/memchr-avx2.S b/sysdeps/x86_64/multiarch/memchr-avx2.S
index 9f98b7ec60..cfec1657b6 100644
--- a/sysdeps/x86_64/multiarch/memchr-avx2.S
+++ b/sysdeps/x86_64/multiarch/memchr-avx2.S
@@ -40,16 +40,20 @@
 ENTRY (MEMCHR)
 # ifndef USE_AS_RAWMEMCHR
 	/* Check for zero length.  */
-	testq	%rdx, %rdx
+	test	%RDX_LP, %RDX_LP
 	jz	L(null)
 # endif
 	movl	%edi, %ecx
 	/* Broadcast CHAR to YMM0.  */
 	vmovd	%esi, %xmm0
 # ifdef USE_AS_WMEMCHR
-	shl	$2, %rdx
+	shl	$2, %RDX_LP
 	vpbroadcastd %xmm0, %ymm0
 # else
+#  ifdef __ILP32__
+	/* Clear the upper 32 bits.  */
+	movl	%edx, %edx
+#  endif
 	vpbroadcastb %xmm0, %ymm0
 # endif
 	/* Check if we may cross page boundary with one vector load.  */
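
Two details make this fix work.  RDX_LP is glibc's name for "the
register that carries a long/pointer-sized value": on x32 it expands
to the 32-bit half of the register, on LP64 to the full 64-bit
register.  The sketch below is a simplified, hypothetical excerpt of
that mapping (the real definitions live in glibc's x86_64 sysdep.h),
not the exact source:

	# ifdef __ILP32__
	#  define RDX_LP	edx	/* x32: lengths and pointers are 32-bit.  */
	# else
	#  define RDX_LP	rdx	/* LP64: use the full 64-bit register.  */
	# endif

The seemingly redundant "movl %edx, %edx" in the byte path relies on
the x86-64 rule that any write to a 32-bit register zero-extends into
the full 64-bit register, so it discards whatever the x32 caller left
in bits 32-63 of %rdx before the rest of the function uses the whole
register as the length.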