author     Joseph Myers <joseph@codesourcery.com>   2013-02-19 21:58:08 +0000
committer  Joseph Myers <joseph@codesourcery.com>   2013-02-19 21:58:08 +0000
commit     92945b5261c412eb590b2b34c7ec9a035f0693a1 (patch)
tree       2d09031d37dcb8faab0ba90eb72b61681deecc51 /sysdeps/i386/i586
parent     b65e2ba34b218a58a74123e2d6ba70ab0d4797bf (diff)
Remove some bounded-pointers support from i386 .S files.
Diffstat (limited to 'sysdeps/i386/i586')
-rw-r--r--  sysdeps/i386/i586/add_n.S     |  9
-rw-r--r--  sysdeps/i386/i586/addmul_1.S  |  8
-rw-r--r--  sysdeps/i386/i586/lshift.S    |  9
-rw-r--r--  sysdeps/i386/i586/memcpy.S    |  4
-rw-r--r--  sysdeps/i386/i586/memset.S    |  4
-rw-r--r--  sysdeps/i386/i586/mul_1.S     |  8
-rw-r--r--  sysdeps/i386/i586/rshift.S    |  9
-rw-r--r--  sysdeps/i386/i586/strchr.S    | 26
-rw-r--r--  sysdeps/i386/i586/strcpy.S    |  5
-rw-r--r--  sysdeps/i386/i586/strlen.S    |  6
-rw-r--r--  sysdeps/i386/i586/sub_n.S     |  9
-rw-r--r--  sysdeps/i386/i586/submul_1.S  |  8
12 files changed, 11 insertions, 94 deletions
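
Every file below changes the same way: the ENTER/LEAVE markers and the CHECK_BOUNDS_* / RETURN_BOUNDED_POINTER invocations are deleted. These macros belong to glibc's old bounded-pointers ("BP") scaffolding; the matching -fbounded-pointers compiler support never materialized, so in normal builds they all expand to nothing and removing them leaves the generated code unchanged. As a rough mental model only (the real macros are assembler macros, and the struct and check below are hypothetical C, not glibc code), a bounded pointer carries low and high bounds next to the value, and CHECK_BOUNDS_BOTH_WIDE asserts that the whole range [ptr, ptr + len) stays inside them:

#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical "fat pointer": the value plus the bounds it may touch. */
struct bounded_ptr {
    unsigned char *value;   /* the ordinary pointer */
    unsigned char *low;     /* lowest valid address */
    unsigned char *high;    /* one past the highest valid address */
};

/* Rough C analogue of what CHECK_BOUNDS_BOTH_WIDE verified in assembly:
   the whole range [value, value + len) must lie inside [low, high).  */
static void check_bounds_both_wide(struct bounded_ptr p, size_t len)
{
    if (p.value < p.low || p.value + len > p.high) {
        fprintf(stderr, "bounds violation\n");
        abort();
    }
}

int main(void)
{
    unsigned char buf[16];
    struct bounded_ptr p = { buf, buf, buf + sizeof buf };
    check_bounds_both_wide(p, 16);      /* in bounds */
    /* check_bounds_both_wide(p, 17);      would abort */
    puts("in bounds");
    return 0;
}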
diff --git a/sysdeps/i386/i586/add_n.S b/sysdeps/i386/i586/add_n.S
index 395c4b78f4..e06893c6ed 100644
--- a/sysdeps/i386/i586/add_n.S
+++ b/sysdeps/i386/i586/add_n.S
@@ -30,7 +30,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_add_n))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -48,13 +47,6 @@ ENTRY (BP_SYM (__mpn_add_n))
 	movl	S2(%esp),%ebx
 	cfi_rel_offset (ebx, 0)
 	movl	SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
-	shll	$2, %ecx		/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
-	shrl	$2, %ecx
-#endif
 	movl	(%ebx),%ebp
 	cfi_rel_offset (ebp, 4)
 
@@ -149,6 +141,5 @@ L(end2):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_add_n))
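
For orientation, __mpn_add_n has the usual mpn semantics: it adds SIZE limbs of S1 and S2 into RES and returns the carry out of the top limb. The deleted #if __BOUNDED_POINTERS__ block only scaled the limb count to bytes (shll $2) so all three arrays could be bounds-checked, then scaled it back. A plain C restatement of what the routine computes, assuming 32-bit limbs as on i386 (a reference sketch, not the assembly):

#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t limb_t;             /* 32-bit limbs, as in the i386 build */

/* res[0..n) = s1[0..n) + s2[0..n), returning the final carry.  */
static limb_t ref_mpn_add_n(limb_t *res, const limb_t *s1,
                            const limb_t *s2, size_t n)
{
    limb_t carry = 0;
    for (size_t i = 0; i < n; i++) {
        uint64_t sum = (uint64_t) s1[i] + s2[i] + carry;
        res[i] = (limb_t) sum;
        carry = (limb_t) (sum >> 32);
    }
    return carry;
}

int main(void)
{
    limb_t a[2] = { 0xffffffffu, 1 }, b[2] = { 1, 0 }, r[2];
    limb_t c = ref_mpn_add_n(r, a, b, 2);
    printf("r = %08" PRIx32 " %08" PRIx32 ", carry = %" PRIu32 "\n",
           r[1], r[0], c);
    return 0;
}

The sub_n.S hunks near the end of the patch are the subtracting counterpart and change in exactly the same way.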
diff --git a/sysdeps/i386/i586/addmul_1.S b/sysdeps/i386/i586/addmul_1.S
index faf862502a..cdb8ddd671 100644
--- a/sysdeps/i386/i586/addmul_1.S
+++ b/sysdeps/i386/i586/addmul_1.S
@@ -35,7 +35,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_addmul_1))
-	ENTER
 
 	pushl	%res_ptr
 	cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_addmul_1))
 	movl	SIZE(%esp), %size
 	movl	S2LIMB(%esp), %s2_limb
 	cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
-	shll	$2, %size	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
-	shrl	$2, %size
-#endif
 	leal	(%res_ptr,%size,4), %res_ptr
 	leal	(%s1_ptr,%size,4), %s1_ptr
 	negl	%size
@@ -98,7 +91,6 @@ L(oop):	adcl	$0, %ebp
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (res_ptr)
 
-	LEAVE
 	ret
 #undef size
 END (BP_SYM (__mpn_addmul_1))
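
__mpn_addmul_1 multiplies the S1 limb vector by the single limb S2LIMB, adds the product into RES, and returns the carry limb that would land above RES[SIZE-1]; the mul_1.S and submul_1.S hunks below are the store and subtract variants of the same loop and lose the same macro lines. A C reference for the documented semantics (not the Pentium-scheduled assembly), again assuming 32-bit limbs:

#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t limb_t;

/* res[0..n) += s1[0..n) * s2_limb; returns the limb carried out on top.  */
static limb_t ref_mpn_addmul_1(limb_t *res, const limb_t *s1,
                               size_t n, limb_t s2_limb)
{
    limb_t carry = 0;
    for (size_t i = 0; i < n; i++) {
        uint64_t t = (uint64_t) s1[i] * s2_limb + res[i] + carry;
        res[i] = (limb_t) t;
        carry = (limb_t) (t >> 32);
    }
    return carry;
}

int main(void)
{
    limb_t r[2] = { 5, 0 }, s[2] = { 0x80000000u, 0 };
    limb_t hi = ref_mpn_addmul_1(r, s, 2, 4);      /* r += s * 4 */
    printf("r = %08" PRIx32 " %08" PRIx32 ", hi = %" PRIu32 "\n",
           r[1], r[0], hi);
    return 0;
}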
diff --git a/sysdeps/i386/i586/lshift.S b/sysdeps/i386/i586/lshift.S
index 461e32dbe7..de680fc7b7 100644
--- a/sysdeps/i386/i586/lshift.S
+++ b/sysdeps/i386/i586/lshift.S
@@ -29,7 +29,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_lshift))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -48,12 +47,6 @@ ENTRY (BP_SYM (__mpn_lshift))
 	movl	SIZE(%esp),%ebx
 	cfi_rel_offset (ebx, 0)
 	movl	CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
-	shll	$2, %ebx		/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
-	shrl	$2, %ebx
-#endif
 
 /* We can use faster code for shift-by-1 under certain conditions.  */
 	cmp	$1,%ecx
@@ -155,7 +148,6 @@ L(end2):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 
 /* We loop from least significant end of the arrays, which is only
@@ -261,6 +253,5 @@ L(L1):	movl	%edx,(%edi)		/* store last limb */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_lshift))
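
__mpn_lshift shifts SIZE limbs left by CNT bits (0 < CNT < 32), stores the result in RES, and returns the bits shifted out of the most significant limb; as the file's own comments note, the later hunks fall in a separate faster path for a shift count of 1 that walks the arrays from the least significant end. The rshift.S hunks below mirror all of this. A C reference for the general case, assuming 32-bit limbs (a sketch, not the assembly):

#include <inttypes.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t limb_t;

/* Shift s[0..n) left by cnt bits (0 < cnt < 32) into res and return the
   bits shifted out of the most significant limb.  */
static limb_t ref_mpn_lshift(limb_t *res, const limb_t *s,
                             size_t n, unsigned cnt)
{
    limb_t out = s[n - 1] >> (32 - cnt);
    for (size_t i = n - 1; i > 0; i--)
        res[i] = (s[i] << cnt) | (s[i - 1] >> (32 - cnt));
    res[0] = s[0] << cnt;
    return out;
}

int main(void)
{
    limb_t s[2] = { 0x80000001u, 0x40000000u }, r[2];
    limb_t out = ref_mpn_lshift(r, s, 2, 1);
    printf("r = %08" PRIx32 " %08" PRIx32 ", out = %" PRIx32 "\n",
           r[1], r[0], out);
    return 0;
}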
diff --git a/sysdeps/i386/i586/memcpy.S b/sysdeps/i386/i586/memcpy.S
index 206715482d..1ad8684de3 100644
--- a/sysdeps/i386/i586/memcpy.S
+++ b/sysdeps/i386/i586/memcpy.S
@@ -42,7 +42,6 @@ ENTRY (__memcpy_chk)
 END (__memcpy_chk)
 #endif
 ENTRY (BP_SYM (memcpy))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -54,8 +53,6 @@ ENTRY (BP_SYM (memcpy))
 	movl	SRC(%esp), %esi
 	cfi_rel_offset (esi, 0)
 	movl	LEN(%esp), %ecx
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, SRC(%esp), %ecx)
 	movl	%edi, %eax
 
 	/* We need this in any case.  */
@@ -127,7 +124,6 @@ L(1):	rep; movsb
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	RET_PTR
 END (BP_SYM (memcpy))
 #if !MEMPCPY_P
diff --git a/sysdeps/i386/i586/memset.S b/sysdeps/i386/i586/memset.S
index 07cd27fbcb..728e12a285 100644
--- a/sysdeps/i386/i586/memset.S
+++ b/sysdeps/i386/i586/memset.S
@@ -45,7 +45,6 @@ ENTRY (__memset_chk)
 END (__memset_chk)
 #endif
 ENTRY (BP_SYM (memset))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -53,7 +52,6 @@ ENTRY (BP_SYM (memset))
 	movl	DEST(%esp), %edi
 	cfi_rel_offset (edi, 0)
 	movl	LEN(%esp), %edx
-	CHECK_BOUNDS_BOTH_WIDE (%edi, DEST(%esp), %edx)
 #if BZERO_P
 	xorl	%eax, %eax	/* we fill with 0 */
 #else
@@ -111,13 +109,11 @@ L(2):	shrl	$2, %ecx	/* convert byte count to longword count */
 #if !BZERO_P
 	/* Load result (only if used as memset).  */
 	movl DEST(%esp), %eax	/* start address of destination is result */
-	RETURN_BOUNDED_POINTER (DEST(%esp))
 #endif
 	popl	%edi
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 #if BZERO_P
 	ret
 #else
diff --git a/sysdeps/i386/i586/mul_1.S b/sysdeps/i386/i586/mul_1.S
index 0026293188..6965e8b9ba 100644
--- a/sysdeps/i386/i586/mul_1.S
+++ b/sysdeps/i386/i586/mul_1.S
@@ -35,7 +35,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_mul_1))
-	ENTER
 
 	pushl	%res_ptr
 	cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_mul_1))
 	movl	SIZE(%esp), %size
 	movl	S2LIMB(%esp), %s2_limb
 	cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
-	shll	$2, %size	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %size)
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %size)
-	shrl	$2, %size
-#endif
 	leal	(%res_ptr,%size,4), %res_ptr
 	leal	(%s1_ptr,%size,4), %s1_ptr
 	negl	%size
@@ -94,7 +87,6 @@ L(oop):	adcl	$0, %ebp
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (res_ptr)
 
-	LEAVE
 	ret
 #undef size
 END (BP_SYM (__mpn_mul_1))
diff --git a/sysdeps/i386/i586/rshift.S b/sysdeps/i386/i586/rshift.S
index c5438ffc9e..bec72b0168 100644
--- a/sysdeps/i386/i586/rshift.S
+++ b/sysdeps/i386/i586/rshift.S
@@ -29,7 +29,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_rshift))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -48,12 +47,6 @@ ENTRY (BP_SYM (__mpn_rshift))
 	movl	SIZE(%esp),%ebx
 	cfi_rel_offset (ebx, 0)
 	movl	CNT(%esp),%ecx
-#if __BOUNDED_POINTERS__
-	shll	$2, %ebx		/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ebx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S(%esp), %ebx)
-	shrl	$2, %ebx
-#endif
 
 /* We can use faster code for shift-by-1 under certain conditions.  */
 	cmp	$1,%ecx
@@ -152,7 +145,6 @@ L(end2):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 
 /* We loop from least significant end of the arrays, which is only
@@ -261,6 +253,5 @@ L(L1):	movl	%edx,(%edi)		/* store last limb */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_rshift))
diff --git a/sysdeps/i386/i586/strchr.S b/sysdeps/i386/i586/strchr.S
index d005b164f3..e2dfa20152 100644
--- a/sysdeps/i386/i586/strchr.S
+++ b/sysdeps/i386/i586/strchr.S
@@ -43,7 +43,6 @@
 
 	.text
 ENTRY (BP_SYM (strchr))
-	ENTER
 
 	pushl %edi		/* Save callee-safe registers.  */
 	cfi_adjust_cfa_offset (-4)
@@ -57,7 +56,6 @@ ENTRY (BP_SYM (strchr))
 
 	movl STR(%esp), %eax
 	movl CHR(%esp), %edx
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
 
 	movl %eax, %edi		/* duplicate string pointer for later */
 	cfi_rel_offset (edi, 12)
@@ -82,7 +80,7 @@ ENTRY (BP_SYM (strchr))
 	jp L(0)			/* exactly two bits set */
 
 	xorb (%eax), %cl	/* is byte the one we are looking for? */
-	jz L(2)			/* yes => return pointer */
+	jz L(out)		/* yes => return pointer */
 
 	xorb %dl, %cl		/* load single byte and test for NUL */
 	je L(3)			/* yes => return NULL */
@@ -91,7 +89,7 @@ ENTRY (BP_SYM (strchr))
 	incl %eax
 
 	cmpb %cl, %dl		/* is byte == C? */
-	je L(2)			/* aligned => return pointer */
+	je L(out)		/* aligned => return pointer */
 
 	cmpb $0, %cl		/* is byte NUL? */
 	je L(3)			/* yes => return NULL */
@@ -104,7 +102,7 @@ ENTRY (BP_SYM (strchr))
 L(0):	movb (%eax), %cl	/* load single byte */
 
 	cmpb %cl, %dl		/* is byte == C? */
-	je L(2)			/* aligned => return pointer */
+	je L(out)		/* aligned => return pointer */
 
 	cmpb $0, %cl		/* is byte NUL? */
 	je L(3)			/* yes => return NULL */
@@ -274,23 +272,21 @@ L(1):	xorl %ecx, %ebp			/* (word^magic) */
 L(5):	subl $4, %eax		/* adjust pointer */
 	testb %bl, %bl		/* first byte == C? */
 
-	jz L(2)			/* yes => return pointer */
+	jz L(out)		/* yes => return pointer */
 
 	incl %eax		/* increment pointer */
 	testb %bh, %bh		/* second byte == C? */
 
-	jz L(2)			/* yes => return pointer */
+	jz L(out)		/* yes => return pointer */
 
 	shrl $16, %ebx		/* make upper bytes accessible */
 	incl %eax		/* increment pointer */
 
 	cmp $0, %bl		/* third byte == C */
-	je L(2)			/* yes => return pointer */
+	je L(out)		/* yes => return pointer */
 
 	incl %eax		/* increment pointer */
 
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
-	RETURN_BOUNDED_POINTER (STR(%esp))
 L(out):	popl %ebp		/* restore saved registers */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (ebp)
@@ -305,7 +301,6 @@ L(out):	popl %ebp		/* restore saved registers */
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	RET_PTR
 
 	cfi_adjust_cfa_offset (16)
@@ -318,7 +313,7 @@ L(out):	popl %ebp		/* restore saved registers */
 L(4):	subl $4, %eax		/* adjust pointer */
 	cmpb %dl, %cl		/* first byte == C? */
 
-	je L(2)			/* yes => return pointer */
+	je L(out)		/* yes => return pointer */
 
 	cmpb $0, %cl		/* first byte == NUL? */
 	je L(3)			/* yes => return NULL */
@@ -326,7 +321,7 @@ L(4):	subl $4, %eax		/* adjust pointer */
 	incl %eax		/* increment pointer */
 
 	cmpb %dl, %ch		/* second byte == C? */
-	je L(2)			/* yes => return pointer */
+	je L(out)		/* yes => return pointer */
 
 	cmpb $0, %ch		/* second byte == NUL? */
 	je L(3)			/* yes => return NULL */
@@ -335,7 +330,7 @@ L(4):	subl $4, %eax		/* adjust pointer */
 	incl %eax		/* increment pointer */
 
 	cmpb %dl, %cl		/* third byte == C? */
-	je L(2)			/* yes => return pointer */
+	je L(out)		/* yes => return pointer */
 
 	cmpb $0, %cl		/* third byte == NUL? */
 	je L(3)			/* yes => return NULL */
@@ -344,10 +339,9 @@ L(4):	subl $4, %eax		/* adjust pointer */
 
 	/* The test four the fourth byte is necessary!  */
 	cmpb %dl, %ch		/* fourth byte == C? */
-	je L(2)			/* yes => return pointer */
+	je L(out)		/* yes => return pointer */
 
 L(3):	xorl %eax, %eax
-	RETURN_NULL_BOUNDED_POINTER
 	jmp L(out)
 END (BP_SYM (strchr))
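
The strchr.S changes are the only ones that touch control flow: every branch that used to land on L(2) is retargeted to L(out), because L(2) existed solely to run CHECK_BOUNDS_HIGH and RETURN_BOUNDED_POINTER before falling through into L(out); with those macros gone the label would be empty, so it is dropped, and the return-NULL path loses its RETURN_NULL_BOUNDED_POINTER for the same reason. The search itself scans a word at a time, XORing in the search character and using a magic-constant carry trick to detect a matching or NUL byte inside a 32-bit word. The textbook C form of the underlying zero-byte test is shown below (an illustration of the idea, not a transcription of the assembly):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Classic word-at-a-time zero-byte test.  The result is nonzero exactly
   when w contains a zero byte; the least significant set 0x80 bit marks
   the first zero byte (higher bytes may show spurious hits).  */
static uint32_t has_zero_byte(uint32_t w)
{
    return (w - 0x01010101u) & ~w & 0x80808080u;
}

int main(void)
{
    uint32_t w1, w2;
    memcpy(&w1, "abcd", 4);             /* no NUL among the four bytes */
    memcpy(&w2, "ab\0d", 4);            /* NUL in the third byte */
    printf("%08" PRIx32 " %08" PRIx32 "\n",
           has_zero_byte(w1), has_zero_byte(w2));
    return 0;
}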
 
diff --git a/sysdeps/i386/i586/strcpy.S b/sysdeps/i386/i586/strcpy.S
index af23bf5a26..c5a4ce75a2 100644
--- a/sysdeps/i386/i586/strcpy.S
+++ b/sysdeps/i386/i586/strcpy.S
@@ -35,7 +35,6 @@
 
 	.text
 ENTRY (BP_SYM (STRCPY))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -48,8 +47,6 @@ ENTRY (BP_SYM (STRCPY))
 	cfi_rel_offset (edi, 8)
 	movl	SRC(%esp), %esi
 	cfi_rel_offset (esi, 4)
-	CHECK_BOUNDS_LOW (%edi, DEST(%esp))
-	CHECK_BOUNDS_LOW (%esi, SRC(%esp))
 
 	xorl	%eax, %eax
 	leal	-1(%esi), %ecx
@@ -158,7 +155,6 @@ L(end2):
 #else
 	movl	DEST(%esp), %eax
 #endif
-	RETURN_BOUNDED_POINTER (DEST(%esp))
 	popl	%ebx
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (ebx)
@@ -169,7 +165,6 @@ L(end2):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	RET_PTR
 END (BP_SYM (STRCPY))
 #ifndef USE_AS_STPCPY
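
This source also builds stpcpy (note the USE_AS_STPCPY conditional and the STRCPY macro in the ENTRY line); the tail above shows the only user-visible difference, namely which pointer ends up in %eax. C reference versions of the two return conventions (illustrative, not the unrolled assembly):

#include <stdio.h>

/* strcpy returns the start of dest ...  */
static char *ref_strcpy(char *dest, const char *src)
{
    char *d = dest;
    while ((*d++ = *src++) != '\0')
        ;
    return dest;
}

/* ... while stpcpy returns a pointer to the terminating NUL it wrote.  */
static char *ref_stpcpy(char *dest, const char *src)
{
    while ((*dest = *src++) != '\0')
        dest++;
    return dest;
}

int main(void)
{
    char buf[16];
    printf("%s\n", ref_strcpy(buf, "abc"));               /* "abc" */
    printf("%ld\n", (long) (ref_stpcpy(buf, "abc") - buf)); /* 3 */
    return 0;
}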
diff --git a/sysdeps/i386/i586/strlen.S b/sysdeps/i386/i586/strlen.S
index a145cb5684..c076343a7b 100644
--- a/sysdeps/i386/i586/strlen.S
+++ b/sysdeps/i386/i586/strlen.S
@@ -41,10 +41,8 @@
 
 	.text
 ENTRY (BP_SYM (strlen))
-	ENTER
 
 	movl STR(%esp), %eax
-	CHECK_BOUNDS_LOW (%eax, STR(%esp))
 	movl $3, %edx		/* load mask (= 3) */
 
 	andl %eax, %edx		/* separate last two bits of address */
@@ -178,11 +176,9 @@ L(3):	subl $4, %eax		/* correct too early pointer increment */
 
 	incl %eax		/* increment pointer */
 
-L(2):	CHECK_BOUNDS_HIGH (%eax, STR(%esp), jb)
-	subl STR(%esp), %eax	/* now compute the length as difference
+L(2):	subl STR(%esp), %eax	/* now compute the length as difference
 				   between start and terminating NUL
 				   character */
-	LEAVE
 	ret
 END (BP_SYM (strlen))
 libc_hidden_builtin_def (strlen)
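
With CHECK_BOUNDS_HIGH gone, the L(2) epilogue reduces to the one instruction that matters: subtracting the start address from the pointer to the terminating NUL. A byte-at-a-time C reference for that calculation (the assembly itself locates the NUL a word at a time):

#include <stddef.h>
#include <stdio.h>

/* Length = (pointer to the NUL) - (start of the string).  */
static size_t ref_strlen(const char *s)
{
    const char *p = s;
    while (*p != '\0')
        p++;
    return (size_t) (p - s);
}

int main(void)
{
    printf("%zu\n", ref_strlen("bounded"));   /* prints 7 */
    return 0;
}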
diff --git a/sysdeps/i386/i586/sub_n.S b/sysdeps/i386/i586/sub_n.S
index 5d3c70235e..14406482ea 100644
--- a/sysdeps/i386/i586/sub_n.S
+++ b/sysdeps/i386/i586/sub_n.S
@@ -30,7 +30,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_sub_n))
-	ENTER
 
 	pushl	%edi
 	cfi_adjust_cfa_offset (4)
@@ -48,13 +47,6 @@ ENTRY (BP_SYM (__mpn_sub_n))
 	movl	S2(%esp),%ebx
 	cfi_rel_offset (ebx, 0)
 	movl	SIZE(%esp),%ecx
-#if __BOUNDED_POINTERS__
-	shll	$2, %ecx	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%edi, RES(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%esi, S1(%esp), %ecx)
-	CHECK_BOUNDS_BOTH_WIDE (%ebx, S2(%esp), %ecx)
-	shrl	$2, %ecx
-#endif
 	movl	(%ebx),%ebp
 	cfi_rel_offset (ebp, 4)
 
@@ -149,6 +141,5 @@ L(end2):
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (edi)
 
-	LEAVE
 	ret
 END (BP_SYM (__mpn_sub_n))
diff --git a/sysdeps/i386/i586/submul_1.S b/sysdeps/i386/i586/submul_1.S
index 8e2b41a35e..cf4fcf05a7 100644
--- a/sysdeps/i386/i586/submul_1.S
+++ b/sysdeps/i386/i586/submul_1.S
@@ -35,7 +35,6 @@
 
 	.text
 ENTRY (BP_SYM (__mpn_submul_1))
-	ENTER
 
 	pushl	%res_ptr
 	cfi_adjust_cfa_offset (4)
@@ -53,12 +52,6 @@ ENTRY (BP_SYM (__mpn_submul_1))
 	movl	SIZE(%esp), %size
 	movl	S2LIMB(%esp), %s2_limb
 	cfi_rel_offset (s2_limb, 0)
-#if __BOUNDED_POINTERS__
-	shll	$2, %sizeP	/* convert limbs to bytes */
-	CHECK_BOUNDS_BOTH_WIDE (%res_ptr, RES(%esp), %sizeP)
-	CHECK_BOUNDS_BOTH_WIDE (%s1_ptr, S1(%esp), %sizeP)
-	shrl	$2, %sizeP
-#endif
 	leal	(%res_ptr,%size,4), %res_ptr
 	leal	(%s1_ptr,%size,4), %s1_ptr
 	negl	%size
@@ -98,7 +91,6 @@ L(oop):	adcl	$0, %ebp
 	cfi_adjust_cfa_offset (-4)
 	cfi_restore (res_ptr)
 
-	LEAVE
 	ret
 #undef size
 END (BP_SYM (__mpn_submul_1))