about summary refs log tree commit diff
path: root/sysdeps/sparc/sparc64/memset.S
diff options
context:
space:
mode:
author	Ulrich Drepper <drepper@redhat.com>	1999-07-27 04:43:32 +0000
committer	Ulrich Drepper <drepper@redhat.com>	1999-07-27 04:43:32 +0000
commit8cb079d41b2108d7a6db4c91a51156464912548b (patch)
tree6cbfca0ae13331d50e1559c50c9a128dec6082a0 /sysdeps/sparc/sparc64/memset.S
parentf05f5ca3857fbf83460003f12e81667c2f60851e (diff)
download	glibc-8cb079d41b2108d7a6db4c91a51156464912548b.tar.gz
glibc-8cb079d41b2108d7a6db4c91a51156464912548b.tar.xz
glibc-8cb079d41b2108d7a6db4c91a51156464912548b.zip
Update.
	* sysdeps/sparc/sparc64/add_n.S: Avoid using %g2, %g3, %g7 registers
	as much as possible. Declare them using .register pseudo-op if they
	are still used.
	* sysdeps/sparc/sparc64/lshift.S: Likewise.
	* sysdeps/sparc/sparc64/memchr.S: Likewise.
	* sysdeps/sparc/sparc64/memcmp.S: Likewise.
	* sysdeps/sparc/sparc64/memcpy.S: Likewise.
	* sysdeps/sparc/sparc64/memset.S: Likewise.
	* sysdeps/sparc/sparc64/rawmemchr.S: Likewise.
	* sysdeps/sparc/sparc64/rshift.S: Likewise.
	* sysdeps/sparc/sparc64/stpcpy.S: Likewise.
	* sysdeps/sparc/sparc64/stpncpy.S: Likewise.
	* sysdeps/sparc/sparc64/strcat.S: Likewise.
	* sysdeps/sparc/sparc64/strchr.S: Likewise.
	* sysdeps/sparc/sparc64/strcmp.S: Likewise.
	* sysdeps/sparc/sparc64/strcpy.S: Likewise.
	* sysdeps/sparc/sparc64/strcspn.S: Likewise.
	* sysdeps/sparc/sparc64/strlen.S: Likewise.
	* sysdeps/sparc/sparc64/strncmp.S: Likewise.
	* sysdeps/sparc/sparc64/strncpy.S: Likewise.
	* sysdeps/sparc/sparc64/strpbrk.S: Likewise.
	* sysdeps/sparc/sparc64/strspn.S: Likewise.
	* sysdeps/sparc/sparc64/sub_n.S: Likewise.
	* sysdeps/sparc/sparc64/dl-machine.h: Likewise.
	Optimize trampoline code for .plt4-.plt32767.
	Fix trampolines for .plt32768+.

1999-07-25  Jakub Jelinek  <jj@ultra.linux.cz>
Diffstat (limited to 'sysdeps/sparc/sparc64/memset.S')
-rw-r--r--	sysdeps/sparc/sparc64/memset.S | 32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/sysdeps/sparc/sparc64/memset.S b/sysdeps/sparc/sparc64/memset.S
index c2b23647f5..9c0f234a6e 100644
--- a/sysdeps/sparc/sparc64/memset.S
+++ b/sysdeps/sparc/sparc64/memset.S
@@ -39,7 +39,7 @@
 	.align		32
 ENTRY(memset)
 	andcc		%o1, 0xff, %o1
-	mov		%o0, %g3
+	mov		%o0, %o5
 	be,a,pt		%icc, 50f
 #ifndef USE_BPR
 	 srl		%o2, 0, %o1
@@ -79,19 +79,19 @@ ENTRY(memset)
 	blu,pn		%xcc, 9f
 	 andcc		%o0, 0x38, %g5
 	be,pn		%icc, 6f
-	 mov		64, %o5
+	 mov		64, %o4
 	andcc		%o0, 8, %g0
 	be,pn		%icc, 1f
-	 sub		%o5, %g5, %o5
+	 sub		%o4, %g5, %o4
 	stx		%o1, [%o0]
 	add		%o0, 8, %o0
-1:	andcc		%o5, 16, %g0
+1:	andcc		%o4, 16, %g0
 	be,pn		%icc, 1f
-	 sub		%o2, %o5, %o2
+	 sub		%o2, %o4, %o2
 	stx		%o1, [%o0]
 	stx		%o1, [%o0 + 8]
 	add		%o0, 16, %o0
-1:	andcc		%o5, 32, %g0
+1:	andcc		%o4, 32, %g0
 	be,pn		%icc, 7f
 	 andncc		%o2, 0x3f, %o3
 	stw		%o1, [%o0]
@@ -162,14 +162,14 @@ ENTRY(memset)
 1:	bne,a,pn	%xcc, 8f
 	 stb		%o1, [%o0]
 8:	retl
-	 mov		%g3, %o0
+	 mov		%o5, %o0
 17:	brz,pn		%o2, 0f
 8:	 add		%o0, 1, %o0
 	subcc		%o2, 1, %o2
 	bne,pt		%xcc, 8b
 	 stb		%o1, [%o0 - 1]
 0:	retl
-	 mov		%g3, %o0
+	 mov		%o5, %o0
 
 6:	stx		%o1, [%o0]
 	andncc		%o2, 0x3f, %o3
@@ -195,7 +195,7 @@ ENTRY(__bzero)
 #ifndef USE_BPR
 	srl		%o1, 0, %o1
 #endif
-	mov		%o0, %g3
+	mov		%o0, %o5
 50:	cmp		%o1, 7
 	bleu,pn		%xcc, 17f
 	 andcc		%o0, 3, %o2
@@ -220,19 +220,19 @@ ENTRY(__bzero)
 2:	blu,pn		%xcc, 9f
 	 andcc		%o0, 0x38, %o2
 	be,pn		%icc, 6f
-	 mov		64, %o5
+	 mov		64, %o4
 	andcc		%o0, 8, %g0
 	be,pn		%icc, 1f
-	 sub		%o5, %o2, %o5
+	 sub		%o4, %o2, %o4
 	stx		%g0, [%o0]
 	add		%o0, 8, %o0
-1:	andcc		%o5, 16, %g0
+1:	andcc		%o4, 16, %g0
 	be,pn		%icc, 1f
-	 sub		%o1, %o5, %o1
+	 sub		%o1, %o4, %o1
 	stx		%g0, [%o0]
 	stx		%g0, [%o0 + 8]
 	add		%o0, 16, %o0
-1:	andcc		%o5, 32, %g0
+1:	andcc		%o4, 32, %g0
 	be,pn		%icc, 7f
 	 andncc		%o1, 0x3f, %o3
 	stx		%g0, [%o0]
@@ -299,7 +299,7 @@ ENTRY(__bzero)
 1:	bne,a,pn	%xcc, 8f
 	 stb		%g0, [%o0]
 8:	retl
-	 mov		%g3, %o0
+	 mov		%o5, %o0
 17:	be,pn		%xcc, 13b
 	 orcc		%o1, 0, %g0
 	be,pn		%xcc, 0f
@@ -308,7 +308,7 @@ ENTRY(__bzero)
 	bne,pt		%xcc, 8b
 	 stb		%g0, [%o0 - 1]
 0:	retl
-	 mov		%g3, %o0
+	 mov		%o5, %o0
 END(__bzero)
 
 weak_alias(__bzero, bzero)