about summary refs log tree commit diff
path: root/sysdeps/powerpc/powerpc32/strcmp.S
diff options
context:
space:
mode:
authorAlan Modra <amodra@gmail.com>2013-08-17 18:41:17 +0930
committerAlan Modra <amodra@gmail.com>2013-10-04 10:39:52 +0930
commit8a7413f9b036da83ffde491a37d9d2340bc321a7 (patch)
tree8ba04b78fde9893b9da3a10e7934a2c127594f6e /sysdeps/powerpc/powerpc32/strcmp.S
parent33ee81de05e83ce12f32a491270bb4c1611399c7 (diff)
downloadglibc-8a7413f9b036da83ffde491a37d9d2340bc321a7.tar.gz
glibc-8a7413f9b036da83ffde491a37d9d2340bc321a7.tar.xz
glibc-8a7413f9b036da83ffde491a37d9d2340bc321a7.zip
PowerPC LE strcmp and strncmp
http://sourceware.org/ml/libc-alpha/2013-08/msg00099.html

More little-endian support.  I leave the main strcmp loops unchanged
(well, except for renumbering rTMP to something other than r0, since
it's needed in an addi insn) and modify the tail for little-endian.

I noticed some of the big-endian tail code was a little untidy, so I
have cleaned that up too.

	* sysdeps/powerpc/powerpc64/strcmp.S (rTMP2): Define as r0.
	(rTMP): Define as r11.
	(strcmp): Add little-endian support.  Optimise tail.
	* sysdeps/powerpc/powerpc32/strcmp.S: Similarly.
	* sysdeps/powerpc/powerpc64/strncmp.S: Likewise.
	* sysdeps/powerpc/powerpc32/strncmp.S: Likewise.
	* sysdeps/powerpc/powerpc64/power4/strncmp.S: Likewise.
	* sysdeps/powerpc/powerpc32/power4/strncmp.S: Likewise.
	* sysdeps/powerpc/powerpc64/power7/strncmp.S: Likewise.
	* sysdeps/powerpc/powerpc32/power7/strncmp.S: Likewise.
Diffstat (limited to 'sysdeps/powerpc/powerpc32/strcmp.S')
-rw-r--r--sysdeps/powerpc/powerpc32/strcmp.S42
1 file changed, 39 insertions, 3 deletions
diff --git a/sysdeps/powerpc/powerpc32/strcmp.S b/sysdeps/powerpc/powerpc32/strcmp.S
index 297ca3c1b2..91d60c9053 100644
--- a/sysdeps/powerpc/powerpc32/strcmp.S
+++ b/sysdeps/powerpc/powerpc32/strcmp.S
@@ -24,7 +24,7 @@
 
 EALIGN (strcmp, 4, 0)
 
-#define rTMP	r0
+#define rTMP2	r0
 #define rRTN	r3
 #define rSTR1	r3	/* first string arg */
 #define rSTR2	r4	/* second string arg */
@@ -34,6 +34,7 @@ EALIGN (strcmp, 4, 0)
 #define r7F7F	r8	/* constant 0x7f7f7f7f */
 #define rNEG	r9	/* ~(word in s1 | 0x7f7f7f7f) */
 #define rBITDIF	r10	/* bits that differ in s1 & s2 words */
+#define rTMP	r11
 
 
 	or	rTMP, rSTR2, rSTR1
@@ -56,10 +57,45 @@ L(g1):	add	rTMP, rFEFE, rWORD1
 	and.	rTMP, rTMP, rNEG
 	cmpw	cr1, rWORD1, rWORD2
 	beq+	L(g0)
-L(endstring):
+
 /* OK. We've hit the end of the string. We need to be careful that
    we don't compare two strings as different because of gunk beyond
    the end of the strings...  */
+#ifdef __LITTLE_ENDIAN__
+L(endstring):
+	addi    rTMP2, rTMP, -1
+	andc    rTMP2, rTMP2, rTMP
+	rlwimi	rTMP2, rTMP2, 1, 0, 30
+	and	rWORD2, rWORD2, rTMP2		/* Mask off gunk.  */
+	and	rWORD1, rWORD1, rTMP2
+	rlwinm	rTMP2, rWORD2, 8, 0xffffffff	/* Byte reverse word.  */
+	rlwinm	rTMP, rWORD1, 8, 0xffffffff
+	rlwimi	rTMP2, rWORD2, 24, 0, 7
+	rlwimi	rTMP, rWORD1, 24, 0, 7
+	rlwimi	rTMP2, rWORD2, 24, 16, 23
+	rlwimi	rTMP, rWORD1, 24, 16, 23
+	xor.	rBITDIF, rTMP, rTMP2
+	sub	rRTN, rTMP, rTMP2
+	bgelr+
+	ori	rRTN, rTMP2, 1
+	blr
+
+L(different):
+	lwz	rWORD1, -4(rSTR1)
+	rlwinm	rTMP2, rWORD2, 8, 0xffffffff	/* Byte reverse word.  */
+	rlwinm	rTMP, rWORD1, 8, 0xffffffff
+	rlwimi	rTMP2, rWORD2, 24, 0, 7
+	rlwimi	rTMP, rWORD1, 24, 0, 7
+	rlwimi	rTMP2, rWORD2, 24, 16, 23
+	rlwimi	rTMP, rWORD1, 24, 16, 23
+	xor.	rBITDIF, rTMP, rTMP2
+	sub	rRTN, rTMP, rTMP2
+	bgelr+
+	ori	rRTN, rTMP2, 1
+	blr
+
+#else
+L(endstring):
 	and	rTMP, r7F7F, rWORD1
 	beq	cr1, L(equal)
 	add	rTMP, rTMP, r7F7F
@@ -84,7 +120,7 @@ L(different):
 L(highbit):
 	ori	rRTN, rWORD2, 1
 	blr
-
+#endif
 
 /* Oh well.  In this case, we just do a byte-by-byte comparison.  */
 	.align 4