diff options
-rw-r--r-- | ChangeLog | 7 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc32/stpcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc32/strcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc64/stpcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc64/strcpy.S | 27 |
5 files changed, 85 insertions, 3 deletions
diff --git a/ChangeLog b/ChangeLog index 540cb94316..bbe5836409 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,5 +1,12 @@ 2013-10-04 Alan Modra <amodra@gmail.com> + * sysdeps/powerpc/powerpc64/strcpy.S: Add little-endian support: + * sysdeps/powerpc/powerpc32/strcpy.S: Likewise. + * sysdeps/powerpc/powerpc64/stpcpy.S: Likewise. + * sysdeps/powerpc/powerpc32/stpcpy.S: Likewise. + +2013-10-04 Alan Modra <amodra@gmail.com> + * sysdeps/powerpc/powerpc64/strcmp.S (rTMP2): Define as r0. (rTMP): Define as r11. (strcmp): Add little-endian support. Optimise tail. diff --git a/sysdeps/powerpc/powerpc32/stpcpy.S b/sysdeps/powerpc/powerpc32/stpcpy.S index 03c6dddc3a..7e106e0e6c 100644 --- a/sysdeps/powerpc/powerpc32/stpcpy.S +++ b/sysdeps/powerpc/powerpc32/stpcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stbu rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stbu rTMP, 1(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stbu rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stbu rALT, 1(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 diff --git a/sysdeps/powerpc/powerpc32/strcpy.S b/sysdeps/powerpc/powerpc32/strcpy.S index 4ae577dbb6..e938cc42a7 100644 --- a/sysdeps/powerpc/powerpc32/strcpy.S +++ b/sysdeps/powerpc/powerpc32/strcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stb rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stb rTMP, 5(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stb rTMP, 6(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stb rTMP, 7(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stb rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stb rALT, 7(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 diff --git a/sysdeps/powerpc/powerpc64/stpcpy.S b/sysdeps/powerpc/powerpc64/stpcpy.S index 070cd4662f..c0b39729e2 100644 --- a/sysdeps/powerpc/powerpc64/stpcpy.S +++ b/sysdeps/powerpc/powerpc64/stpcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stbu rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stbu rTMP, 1(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stbu rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stbu rALT, 1(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 diff --git a/sysdeps/powerpc/powerpc64/strcpy.S b/sysdeps/powerpc/powerpc64/strcpy.S index 4c6fd3f9d7..a7fd85bad4 100644 --- a/sysdeps/powerpc/powerpc64/strcpy.S +++ b/sysdeps/powerpc/powerpc64/strcpy.S @@ -68,6 +68,32 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ L(g1): +#ifdef __LITTLE_ENDIAN__ + extrdi. rTMP, rALT, 8, 56 + stb rALT, 8(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 48 + stb rTMP, 9(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 40 + stb rTMP, 10(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 32 + stb rTMP, 11(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 24 + stb rTMP, 12(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 16 + stb rTMP, 13(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 8 + stb rTMP, 14(rDEST) + beqlr- + extrdi rTMP, rALT, 8, 0 + stb rTMP, 15(rDEST) + blr +#else extrdi. rTMP, rALT, 8, 0 stb rTMP, 8(rDEST) beqlr- @@ -91,6 +117,7 @@ L(g1): beqlr- stb rALT, 15(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 |