author    Richard Henderson <rth@twiddle.net>  2012-06-06 14:45:45 -0700
committer Richard Henderson <rth@twiddle.net>  2012-06-06 14:45:45 -0700
commit    8e2f4e971fa048f55428ca49c3a55fa5dc03bd52 (patch)
tree      c16113724e2d38b6b90879b9e3687b08f3670f71
parent    43ac6d266566b995c0c184dd964987c23ce1e969 (diff)
alpha: Fix [BZ #13718]
The routines expect to be able to bias the count by a small number.
If the count is near -1ull, the count will overflow.  Since we cannot
use the whole 64-bit address space, bound the count to LONG_MAX.
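
The overflow is easiest to see in C. Below is a minimal sketch of the problem and of the clamp the patch adds (the helper names are hypothetical; the real routines are the Alpha assembly in the diffs that follow):

    #include <limits.h>
    #include <stddef.h>

    /* __stxncpy biases the byte count by the destination misalignment
       (0..7) before turning it into a word-loop counter.  If count is
       near (size_t)-1, that small addition wraps around and the copy
       terminates almost immediately.  */
    static size_t
    bias_count_buggy (size_t count, size_t dst_misalign)
    {
      return count + dst_misalign;   /* may wrap to a tiny value */
    }

    /* The fix: no object can span more than half of the 64-bit address
       space, so clamping count to LONG_MAX first leaves room for the
       small bias without changing the result for any valid count.  */
    static size_t
    bias_count_fixed (size_t count, size_t dst_misalign)
    {
      if (count > LONG_MAX)
        count = LONG_MAX;
      return count + dst_misalign;   /* cannot wrap now */
    }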
-rw-r--r--  ChangeLog.alpha                    |  4
-rw-r--r--  sysdeps/alpha/alphaev6/stxncpy.S   | 19
-rw-r--r--  sysdeps/alpha/stxncpy.S            | 23
3 files changed, 30 insertions, 16 deletions
diff --git a/ChangeLog.alpha b/ChangeLog.alpha
index 0598d7ee19..d291df970a 100644
--- a/ChangeLog.alpha
+++ b/ChangeLog.alpha
@@ -1,5 +1,9 @@
 2012-06-06  Richard Henderson  <rth@twiddle.net>
 
+	[BZ #13718]
+	* sysdeps/alpha/stxncpy.S: Bound count to LONG_MAX at startup.
+	* sysdeps/alpha/alphaev6/stxncpy.S: Likewise.
+
 	* sysdeps/alpha/fpu/e_sqrt.c: Include <math_private.h> before
 	redefining __ieee754_sqrt.
 
diff --git a/sysdeps/alpha/alphaev6/stxncpy.S b/sysdeps/alpha/alphaev6/stxncpy.S
index d134eb8c10..28495df004 100644
--- a/sysdeps/alpha/alphaev6/stxncpy.S
+++ b/sysdeps/alpha/alphaev6/stxncpy.S
@@ -143,18 +143,25 @@ $a_eoc:
 	.align 4
 __stxncpy:
 	/* Are source and destination co-aligned?  */
+	lda	t2, -1		# E :
 	xor	a0, a1, t1	# E :
 	and	a0, 7, t0	# E : find dest misalignment
-	and	t1, 7, t1	# E : (stall)
-	addq	a2, t0, a2	# E : bias count by dest misalignment (stall)
+	nop			# E :
 
-	subq	a2, 1, a2	# E :
+	srl	t2, 1, t2	# U :
+	and	t1, 7, t1	# E :
+	cmovlt	a2, t2, a2	# E : bound count to LONG_MAX (stall)
+	nop			# E :
+
+	addq	a2, t0, a2	# E : bias count by dest misalignment
+	subq	a2, 1, a2	# E : (stall)
 	and	a2, 7, t2	# E : (stall)
-	srl	a2, 3, a2	# U : a2 = loop counter = (count - 1)/8 (stall)
-	addq	zero, 1, t10	# E :
+	lda	t10, 1		# E :
 
+	srl	a2, 3, a2	# U : a2 = loop counter = (count - 1)/8
 	sll	t10, t2, t10	# U : t10 = bitmask of last count byte
-	bne	t1, $unaligned	# U :
+	nop			# E :
+	bne	t1, $unaligned	# U : (stall)
 
 	/* We are co-aligned; take care of a partial first word.  */
 	ldq_u	t1, 0(a1)	# L : load first src word
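
For reference, the three instructions added at the top of the EV6 entry point implement the bound without a branch: lda t2, -1 materializes an all-ones value, srl t2, 1, t2 shifts in a zero sign bit leaving 0x7fffffffffffffff (LONG_MAX), and cmovlt a2, t2, a2 substitutes that bound only when the count's sign bit is set. A rough C model of the sequence, assuming a 64-bit long as on Alpha (the function name is illustrative only):

    /* Rough C model of: lda t2,-1; srl t2,1,t2; cmovlt a2,t2,a2.  */
    static unsigned long
    clamp_count (unsigned long a2)
    {
      unsigned long t2 = (unsigned long) -1;   /* lda    t2, -1      */
      t2 >>= 1;                                /* srl    t2, 1, t2   */
      if ((long) a2 < 0)                       /* cmovlt a2, t2, a2  */
        a2 = t2;
      return a2;
    }

The generic stxncpy.S change below performs the same bounding with the same three instructions, only without the EV6 pipeline-slot annotations and nop padding.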
diff --git a/sysdeps/alpha/stxncpy.S b/sysdeps/alpha/stxncpy.S
index f8b494af0c..d2cb9c3c93 100644
--- a/sysdeps/alpha/stxncpy.S
+++ b/sysdeps/alpha/stxncpy.S
@@ -123,16 +123,19 @@ $a_eoc:
 	.align 3
 __stxncpy:
 	/* Are source and destination co-aligned?  */
-	xor	a0, a1, t1	# e0    :
-	and	a0, 7, t0	# .. e1 : find dest misalignment
-	and	t1, 7, t1	# e0    :
-	addq	a2, t0, a2	# .. e1 : bias count by dest misalignment
-	subq	a2, 1, a2	# e0    :
-	and	a2, 7, t2	# e1    :
-	srl	a2, 3, a2	# e0    : a2 = loop counter = (count - 1)/8
-	addq	zero, 1, t10	# .. e1 :
-	sll	t10, t2, t10	# e0    : t10 = bitmask of last count byte
-	bne	t1, $unaligned	# .. e1 :
+	lda	t2, -1
+	xor	a0, a1, t1
+	srl	t2, 1, t2
+	and	a0, 7, t0		# find dest misalignment
+	cmovlt	a2, t2, a2		# bound neg count to LONG_MAX
+	and	t1, 7, t1
+	addq	a2, t0, a2		# bias count by dest misalignment
+	subq	a2, 1, a2
+	and	a2, 7, t2
+	srl	a2, 3, a2		# a2 = loop counter = (count - 1)/8
+	addq	zero, 1, t10
+	sll	t10, t2, t10		# t10 = bitmask of last count byte
+	bne	t1, $unaligned
 
 	/* We are co-aligned; take care of a partial first word.  */