Diffstat (limited to 'sysdeps/s390/s390-64')
-rw-r--r--  sysdeps/s390/s390-64/memcmp.S                  |  4
-rw-r--r--  sysdeps/s390/s390-64/memcpy.S                  | 10
-rw-r--r--  sysdeps/s390/s390-64/memset.S                  |  4
-rw-r--r--  sysdeps/s390/s390-64/multiarch/ifunc-resolve.c | 18
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memcmp.S        | 12
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memcpy.S        | 14
-rw-r--r--  sysdeps/s390/s390-64/multiarch/memset.S        | 18
7 files changed, 40 insertions(+), 40 deletions(-)
diff --git a/sysdeps/s390/s390-64/memcmp.S b/sysdeps/s390/s390-64/memcmp.S
index 327d7fab38..6767438f28 100644
--- a/sysdeps/s390/s390-64/memcmp.S
+++ b/sysdeps/s390/s390-64/memcmp.S
@@ -27,7 +27,7 @@
 
        .text
 #ifdef USE_MULTIARCH
-ENTRY(memcmp_z900)
+ENTRY(__memcmp_z900)
 #else
 ENTRY(memcmp)
 #endif
@@ -56,7 +56,7 @@ ENTRY(memcmp)
 .L_Z900_15:
         clc     0(1,%r3),0(%r2)
 #ifdef USE_MULTIARCH
-END(memcmp_z900)
+END(__memcmp_z900)
 #else
 END(memcmp)
 libc_hidden_builtin_def (memcmp)
diff --git a/sysdeps/s390/s390-64/memcpy.S b/sysdeps/s390/s390-64/memcpy.S
index da387afe6c..3f122dcf0f 100644
--- a/sysdeps/s390/s390-64/memcpy.S
+++ b/sysdeps/s390/s390-64/memcpy.S
@@ -29,7 +29,7 @@
        .text
 
 #ifdef USE_MULTIARCH
-ENTRY(memcpy_z900)
+ENTRY(__memcpy_z900)
 #else
 ENTRY(memcpy)
 #endif
@@ -48,7 +48,7 @@ ENTRY(memcpy)
         br      %r14
 .L_Z900_13:
         chi	%r5,4096             # Switch to mvcle for copies >1MB
-        jh      memcpy_mvcle
+        jh      __memcpy_mvcle
 .L_Z900_12:
         mvc     0(256,%r1),0(%r3)
         la      %r1,256(%r1)
@@ -59,13 +59,13 @@ ENTRY(memcpy)
         mvc     0(1,%r1),0(%r3)
 
 #ifdef USE_MULTIARCH
-END(memcpy_z900)
+END(__memcpy_z900)
 #else
 END(memcpy)
 libc_hidden_builtin_def (memcpy)
 #endif
 
-ENTRY(memcpy_mvcle)
+ENTRY(__memcpy_mvcle)
        # Using as standalone function will result in unexpected
        # results since the length field is incremented by 1 in order to
        # compensate the changes already done in the functions above.
@@ -78,4 +78,4 @@ ENTRY(memcpy_mvcle)
        jo      .L_MVCLE_1
        lgr     %r2,%r1             # return destination address
        br      %r14
-END(memcpy_mvcle)
+END(__memcpy_mvcle)
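
A note on the __memcpy_mvcle hunk above: per the comment, the function is not usable standalone, because it increments the length field by 1 to compensate for the adjustment its callers (the z900/z10/z196 paths) have already made. A minimal C sketch of that contract, with hypothetical names (raw_mvcle_copy stands in for the actual mvcle loop):

    /* Hedged sketch of the calling convention the comment describes:
       callers arrive with a pre-adjusted length, and the entry point
       re-adds 1 before copying, so passing a raw byte count here
       would copy the wrong amount.  */
    #include <stddef.h>

    static void raw_mvcle_copy (char *dst, const char *src, size_t len); /* hypothetical */

    static void
    memcpy_mvcle_outline (char *dst, const char *src, size_t adjusted_len)
    {
      raw_mvcle_copy (dst, src, adjusted_len + 1);  /* "length field is incremented by 1" */
    }
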
diff --git a/sysdeps/s390/s390-64/memset.S b/sysdeps/s390/s390-64/memset.S
index 78e74a05a4..1e307d7ec8 100644
--- a/sysdeps/s390/s390-64/memset.S
+++ b/sysdeps/s390/s390-64/memset.S
@@ -29,7 +29,7 @@
        .text
 
 #ifdef USE_MULTIARCH
-ENTRY(memset_z900)
+ENTRY(__memset_z900)
 #else
 ENTRY(memset)
 #endif
@@ -57,7 +57,7 @@ ENTRY(memset)
 .L_Z900_18:
         mvc     1(1,%r1),0(%r1)
 #ifdef USE_MULTIARCH
-END(memset_z900)
+END(__memset_z900)
 #else
 END(memset)
 libc_hidden_builtin_def (memset)
diff --git a/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c b/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
index fce8ef6401..b303304f31 100644
--- a/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
+++ b/sysdeps/s390/s390-64/multiarch/ifunc-resolve.c
@@ -28,17 +28,17 @@
 #define IFUNC_RESOLVE(FUNC)						\
   asm (".globl " #FUNC "\n\t"						\
        ".type  " #FUNC ",@gnu_indirect_function\n\t"			\
-       ".set   " #FUNC ",resolve_" #FUNC "\n\t"				\
+       ".set   " #FUNC ",__resolve_" #FUNC "\n\t"			\
        ".globl __GI_" #FUNC "\n\t"					\
        ".set   __GI_" #FUNC "," #FUNC "\n");				\
 									\
   /* Make the declarations of the optimized functions hidden in order
      to prevent GOT slots being generated for them. */			\
-  extern void *FUNC##_z196 attribute_hidden;				\
-  extern void *FUNC##_z10 attribute_hidden;				\
-  extern void *FUNC##_z900 attribute_hidden;				\
+  extern void *__##FUNC##_z196 attribute_hidden;			\
+  extern void *__##FUNC##_z10 attribute_hidden;				\
+  extern void *__##FUNC##_z900 attribute_hidden;			\
 									\
-  void *resolve_##FUNC (unsigned long int dl_hwcap)			\
+  void *__resolve_##FUNC (unsigned long int dl_hwcap)			\
   {									\
     if (dl_hwcap & HWCAP_S390_STFLE)					\
       {									\
@@ -54,14 +54,14 @@
 		     : : "cc");						\
 									\
 	if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z196))) != 0)	\
-	  return &FUNC##_z196;						\
+	  return &__##FUNC##_z196;					\
 	else if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z10))) != 0)	\
-	  return &FUNC##_z10;						\
+	  return &__##FUNC##_z10;					\
 	else								\
-	  return &FUNC##_z900;						\
+	  return &__##FUNC##_z900;					\
       }									\
     else								\
-      return &FUNC##_z900;						\
+      return &__##FUNC##_z900;						\
   }
 
 IFUNC_RESOLVE(memset)
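
For reference, this is roughly what IFUNC_RESOLVE expands to for FUNC = memset after this patch; it is a hand-written approximation, not the exact preprocessed output, and it assumes the stfle query between the two hunks (the inline asm that fills stfle_bits) is unchanged:

    /* Approximate expansion of IFUNC_RESOLVE(memset): memset becomes a
       GNU indirect function whose resolver picks the best variant from
       the hardware-capability bits at load time.  */
    asm (".globl memset\n\t"
         ".type  memset,@gnu_indirect_function\n\t"
         ".set   memset,__resolve_memset\n\t"
         ".globl __GI_memset\n\t"
         ".set   __GI_memset,memset\n");

    /* Hidden declarations so no GOT slots are generated for the variants.  */
    extern void *__memset_z196 attribute_hidden;
    extern void *__memset_z10 attribute_hidden;
    extern void *__memset_z900 attribute_hidden;

    void *__resolve_memset (unsigned long int dl_hwcap)
    {
      if (dl_hwcap & HWCAP_S390_STFLE)
        {
          /* ... the stfle query from the unchanged part of the macro
             fills stfle_bits here ... */
          if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z196))) != 0)
            return &__memset_z196;
          else if ((stfle_bits & (1UL << (63 - STFLE_BITS_Z10))) != 0)
            return &__memset_z10;
          else
            return &__memset_z900;
        }
      else
        return &__memset_z900;
    }
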
diff --git a/sysdeps/s390/s390-64/multiarch/memcmp.S b/sysdeps/s390/s390-64/multiarch/memcmp.S
index eed49c71f5..049847d9cf 100644
--- a/sysdeps/s390/s390-64/multiarch/memcmp.S
+++ b/sysdeps/s390/s390-64/multiarch/memcmp.S
@@ -29,7 +29,7 @@
 
 #if IS_IN (libc)
 
-ENTRY(memcmp_z196)
+ENTRY(__memcmp_z196)
 	.machine "z196"
         ltgr    %r4,%r4
         je      .L_Z196_4
@@ -60,9 +60,9 @@ ENTRY(memcmp_z196)
         br      %r14
 .L_Z196_14:
         clc     0(1,%r3),0(%r2)
-END(memcmp_z196)
+END(__memcmp_z196)
 
-ENTRY(memcmp_z10)
+ENTRY(__memcmp_z10)
 	.machine "z10"
         ltgr    %r4,%r4
         je      .L_Z10_4
@@ -87,7 +87,7 @@ ENTRY(memcmp_z10)
         j       .L_Z10_3
 .L_Z10_15:
         clc     0(1,%r3),0(%r2)
-END(memcmp_z10)
+END(__memcmp_z10)
 
 #endif
 
@@ -95,7 +95,7 @@ END(memcmp_z10)
 
 #if !IS_IN (libc)
 .globl   memcmp
-.set     memcmp,memcmp_z900
+.set     memcmp,__memcmp_z900
 .weak    bcmp
-.set	 bcmp,memcmp_z900
+.set	 bcmp,__memcmp_z900
 #endif
diff --git a/sysdeps/s390/s390-64/multiarch/memcpy.S b/sysdeps/s390/s390-64/multiarch/memcpy.S
index 575ff4307e..fc670c7ac4 100644
--- a/sysdeps/s390/s390-64/multiarch/memcpy.S
+++ b/sysdeps/s390/s390-64/multiarch/memcpy.S
@@ -29,7 +29,7 @@
 
 #if defined SHARED && IS_IN (libc)
 
-ENTRY(memcpy_z196)
+ENTRY(__memcpy_z196)
         .machine "z196"
         ltgr    %r4,%r4
         je      .L_Z196_4
@@ -44,7 +44,7 @@ ENTRY(memcpy_z196)
         br      %r14
 .L_Z196_5:
         cgfi    %r5,262144      # Switch to mvcle for copies >64MB
-        jh      memcpy_mvcle
+        jh      __memcpy_mvcle
 .L_Z196_2:
         pfd     1,768(%r3)
         pfd     2,768(%r1)
@@ -56,9 +56,9 @@ ENTRY(memcpy_z196)
         j       .L_Z196_3
 .L_Z196_14:
         mvc     0(1,%r1),0(%r3)
-END(memcpy_z196)
+END(__memcpy_z196)
 
-ENTRY(memcpy_z10)
+ENTRY(__memcpy_z10)
 	.machine "z10"
         cgije   %r4,0,.L_Z10_4
         aghi    %r4,-1
@@ -71,7 +71,7 @@ ENTRY(memcpy_z10)
         br      %r14
 .L_Z10_13:
         cgfi    %r5,65535	# Switch to mvcle for copies >16MB
-        jh      memcpy_mvcle
+        jh      __memcpy_mvcle
 .L_Z10_12:
         pfd     1,768(%r3)
         pfd     2,768(%r1)
@@ -82,7 +82,7 @@ ENTRY(memcpy_z10)
         j       .L_Z10_3
 .L_Z10_15:
         mvc     0(1,%r1),0(%r3)
-END(memcpy_z10)
+END(__memcpy_z10)
 
 #endif
 
@@ -90,5 +90,5 @@ END(memcpy_z10)
 
 #if !defined SHARED || !IS_IN (libc)
 .globl   memcpy
-.set     memcpy,memcpy_z900
+.set     memcpy,__memcpy_z900
 #endif
diff --git a/sysdeps/s390/s390-64/multiarch/memset.S b/sysdeps/s390/s390-64/multiarch/memset.S
index 74345c01e8..3ac110a7e0 100644
--- a/sysdeps/s390/s390-64/multiarch/memset.S
+++ b/sysdeps/s390/s390-64/multiarch/memset.S
@@ -29,7 +29,7 @@
 
 #if IS_IN (libc)
 
-ENTRY(memset_z196)
+ENTRY(__memset_z196)
 	.machine "z196"
         ltgr    %r4,%r4
         je      .L_Z196_4
@@ -47,7 +47,7 @@ ENTRY(memset_z196)
         br      %r14
 .L_Z196_1:
 	cgfi	%r5,1048576
-	jh	memset_mvcle	   # Switch to mvcle for >256MB
+	jh	__memset_mvcle	   # Switch to mvcle for >256MB
 .L_Z196_2:
         pfd     2,1024(%r1)
         mvc     1(256,%r1),0(%r1)
@@ -57,9 +57,9 @@ ENTRY(memset_z196)
         j       .L_Z196_3
 .L_Z196_17:
         mvc     1(1,%r1),0(%r1)
-END(memset_z196)
+END(__memset_z196)
 
-ENTRY(memset_z10)
+ENTRY(__memset_z10)
 	.machine "z10"
         cgije   %r4,0,.L_Z10_4
         stc     %r3,0(%r2)
@@ -74,7 +74,7 @@ ENTRY(memset_z10)
         br      %r14
 .L_Z10_15:
 	cgfi	%r5,163840          # Switch to mvcle for >40MB
-	jh	memset_mvcle
+	jh	__memset_mvcle
 .L_Z10_14:
         pfd     2,1024(%r1)
         mvc     1(256,%r1),0(%r1)
@@ -83,9 +83,9 @@ ENTRY(memset_z10)
         j       .L_Z10_3
 .L_Z10_18:
         mvc     1(1,%r1),0(%r1)
-END(memset_z10)
+END(__memset_z10)
 
-ENTRY(memset_mvcle)
+ENTRY(__memset_mvcle)
 	aghi	%r4,2               # take back the change done by the caller
 	lgr	%r0,%r2		    # save source address
 	lgr	%r1,%r3		    # move pad byte to R1
@@ -97,7 +97,7 @@ ENTRY(memset_mvcle)
 	lgr	%r2,%r0		    # return value is source address
 .L1:
 	br	%r14
-END(memset_mvcle)
+END(__memset_mvcle)
 
 #endif
 
@@ -105,5 +105,5 @@ END(memset_mvcle)
 
 #if !IS_IN (libc)
 .globl   memset
-.set     memset,memset_z900
+.set     memset,__memset_z900
 #endif