diff options
author | Alan Modra <amodra@gmail.com> | 2013-08-17 18:45:31 +0930 |
---|---|---|
committer | Alan Modra <amodra@gmail.com> | 2013-10-04 10:40:11 +0930 |
commit | 43b84013714c46e6dcae4a5564c5527777ad5e08 (patch) | |
tree | 7d7c8aa8ec9c84991e0fc2f1e24a2c711c5de3ed /sysdeps | |
parent | 8a7413f9b036da83ffde491a37d9d2340bc321a7 (diff) | |
download | glibc-43b84013714c46e6dcae4a5564c5527777ad5e08.tar.gz glibc-43b84013714c46e6dcae4a5564c5527777ad5e08.tar.xz glibc-43b84013714c46e6dcae4a5564c5527777ad5e08.zip |
PowerPC LE strcpy
http://sourceware.org/ml/libc-alpha/2013-08/msg00100.html The strcpy changes for little-endian are quite straight-forward, just a matter of rotating the last word differently. I'll note that the powerpc64 version of stpcpy is just begging to be converted to use 64-bit loads and stores. * sysdeps/powerpc/powerpc64/strcpy.S: Add little-endian support. * sysdeps/powerpc/powerpc32/strcpy.S: Likewise. * sysdeps/powerpc/powerpc64/stpcpy.S: Likewise. * sysdeps/powerpc/powerpc32/stpcpy.S: Likewise.
Diffstat (limited to 'sysdeps')
-rw-r--r-- | sysdeps/powerpc/powerpc32/stpcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc32/strcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc64/stpcpy.S | 18 | ||||
-rw-r--r-- | sysdeps/powerpc/powerpc64/strcpy.S | 27 |
4 files changed, 78 insertions, 3 deletions
diff --git a/sysdeps/powerpc/powerpc32/stpcpy.S b/sysdeps/powerpc/powerpc32/stpcpy.S index 03c6dddc3a..7e106e0e6c 100644 --- a/sysdeps/powerpc/powerpc32/stpcpy.S +++ b/sysdeps/powerpc/powerpc32/stpcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stbu rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stbu rTMP, 1(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stbu rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stbu rALT, 1(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 diff --git a/sysdeps/powerpc/powerpc32/strcpy.S b/sysdeps/powerpc/powerpc32/strcpy.S index 4ae577dbb6..e938cc42a7 100644 --- a/sysdeps/powerpc/powerpc32/strcpy.S +++ b/sysdeps/powerpc/powerpc32/strcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stb rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stb rTMP, 5(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stb rTMP, 6(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stb rTMP, 7(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stb rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stb rALT, 7(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. 
*/ .align 4 diff --git a/sysdeps/powerpc/powerpc64/stpcpy.S b/sysdeps/powerpc/powerpc64/stpcpy.S index 070cd4662f..c0b39729e2 100644 --- a/sysdeps/powerpc/powerpc64/stpcpy.S +++ b/sysdeps/powerpc/powerpc64/stpcpy.S @@ -62,7 +62,22 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ -L(g1): rlwinm. rTMP, rALT, 8, 24, 31 +L(g1): +#ifdef __LITTLE_ENDIAN__ + rlwinm. rTMP, rALT, 0, 24, 31 + stbu rALT, 4(rDEST) + beqlr- + rlwinm. rTMP, rALT, 24, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm. rTMP, rALT, 16, 24, 31 + stbu rTMP, 1(rDEST) + beqlr- + rlwinm rTMP, rALT, 8, 24, 31 + stbu rTMP, 1(rDEST) + blr +#else + rlwinm. rTMP, rALT, 8, 24, 31 stbu rTMP, 4(rDEST) beqlr- rlwinm. rTMP, rALT, 16, 24, 31 @@ -73,6 +88,7 @@ L(g1): rlwinm. rTMP, rALT, 8, 24, 31 beqlr- stbu rALT, 1(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 diff --git a/sysdeps/powerpc/powerpc64/strcpy.S b/sysdeps/powerpc/powerpc64/strcpy.S index 4c6fd3f9d7..a7fd85bad4 100644 --- a/sysdeps/powerpc/powerpc64/strcpy.S +++ b/sysdeps/powerpc/powerpc64/strcpy.S @@ -68,6 +68,32 @@ L(g2): add rTMP, rFEFE, rWORD mr rALT, rWORD /* We've hit the end of the string. Do the rest byte-by-byte. */ L(g1): +#ifdef __LITTLE_ENDIAN__ + extrdi. rTMP, rALT, 8, 56 + stb rALT, 8(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 48 + stb rTMP, 9(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 40 + stb rTMP, 10(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 32 + stb rTMP, 11(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 24 + stb rTMP, 12(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 16 + stb rTMP, 13(rDEST) + beqlr- + extrdi. rTMP, rALT, 8, 8 + stb rTMP, 14(rDEST) + beqlr- + extrdi rTMP, rALT, 8, 0 + stb rTMP, 15(rDEST) + blr +#else extrdi. rTMP, rALT, 8, 0 stb rTMP, 8(rDEST) beqlr- @@ -91,6 +117,7 @@ L(g1): beqlr- stb rALT, 15(rDEST) blr +#endif /* Oh well. In this case, we just do a byte-by-byte copy. */ .align 4 |