Diffstat (limited to 'sysdeps')
-rw-r--r--   sysdeps/i386/i486/bits/string.h                     |  3
-rw-r--r--   sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S    |  6
-rw-r--r--   sysdeps/powerpc/powerpc32/fpu/setjmp-common.S       | 10
-rw-r--r--   sysdeps/powerpc/powerpc64/__longjmp-common.S        |  6
-rw-r--r--   sysdeps/powerpc/powerpc64/setjmp-common.S           | 10
-rw-r--r--   sysdeps/unix/sysv/linux/powerpc/powerpc32/clone.S   |  6
-rw-r--r--   sysdeps/unix/sysv/linux/powerpc/powerpc64/clone.S   | 25
7 files changed, 30 insertions, 36 deletions
diff --git a/sysdeps/i386/i486/bits/string.h b/sysdeps/i386/i486/bits/string.h
index 099d720a4c..203907b146 100644
--- a/sysdeps/i386/i486/bits/string.h
+++ b/sysdeps/i386/i486/bits/string.h
@@ -1050,6 +1050,7 @@ __strncat_g (char *__dest, __const char __src[], size_t __n)
 #ifdef __i686__
   __asm__ __volatile__
     ("repne; scasb\n"
+     "movl %4, %3\n\t"
      "decl %1\n\t"
      "1:\n\t"
      "decl %3\n\t"
@@ -1062,7 +1063,7 @@ __strncat_g (char *__dest, __const char __src[], size_t __n)
      "2:\n\t"
      "movb $0,(%1)"
      : "=&a" (__dummy), "=&D" (__tmp), "=&S" (__src), "=&c" (__n)
-     : "0" (0), "1" (__tmp), "2" (__src), "3" (__n)
+     : "g" (__n), "0" (0), "1" (__tmp), "2" (__src), "3" (0xffffffff)
      : "memory", "cc");
 #else
   --__tmp;
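
The string.h hunk fixes the i686 inline-asm version of __strncat_g: the repne; scasb that locates the end of dest now runs with ECX preloaded to 0xffffffff, and the caller's __n is reloaded into ECX ("movl %4, %3") only for the copy loop, so the append is bounded by __n rather than by whatever the scan left in the counter. A minimal C sketch of the intended behaviour (no inline asm, illustrative names only):

    #include <stddef.h>

    /* Rough equivalent of what the fixed __strncat_g asm computes.  */
    static char *
    strncat_sketch (char *dest, const char *src, size_t n)
    {
      char *p = dest;

      /* "repne; scasb" with ECX = 0xffffffff: unbounded scan for the
         terminating NUL of dest.  */
      while (*p != '\0')
        ++p;

      /* "movl %4, %3": the copy loop is limited by the caller's n.  */
      while (n-- > 0 && *src != '\0')
        *p++ = *src++;

      /* "movb $0,(%1)": the result is always NUL-terminated.  */
      *p = '\0';
      return dest;
    }
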
diff --git a/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S b/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
index b61e127a99..a2415b9542 100644
--- a/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
+++ b/sysdeps/powerpc/powerpc32/fpu/__longjmp-common.S
@@ -50,7 +50,7 @@ ENTRY (BP_SYM (__longjmp))
 	lwz r5,_dl_hwcap@l(r5)
 # endif
 	andis.	r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
-	beq	no_vmx
+	beq	L(no_vmx)
 	la	r5,((JB_VRS)*4)(3)
 	andi.	r6,r5,0xf
 	lwz	r0,((JB_VRSAVE)*4)(3)
@@ -78,7 +78,7 @@ ENTRY (BP_SYM (__longjmp))
 	load_misaligned_vmx_lo_loaded(v30,v31,v0,r6,r5)
 	lvx	v1,0,r5
 	vperm	v31,v31,v1,v0
-	b	no_vmx
+	b	L(no_vmx)
 aligned_restore_vmx:
 	addi	r6,r5,16
 	lvx	v20,0,r5
@@ -103,7 +103,7 @@ aligned_restore_vmx:
 	addi	r6,r6,32
 	lvx	v30,0,r5
 	lvx	v31,0,r6
-no_vmx:
+L(no_vmx):
 #endif
 	lwz	r1,(JB_GPR1*4)(r3)
 	lwz	r0,(JB_LR*4)(r3)
diff --git a/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S b/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
index 796d24f25c..77ee05f487 100644
--- a/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
+++ b/sysdeps/powerpc/powerpc32/fpu/setjmp-common.S
@@ -92,13 +92,13 @@ ENTRY (BP_SYM (__sigsetjmp))
 	lwz r5,_dl_hwcap@l(r5)
 #endif
 	andis.	r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
-	beq	no_vmx
+	beq	L(no_vmx)
 	la	r5,((JB_VRS)*4)(3)
 	andi.	r6,r5,0xf
 	mfspr	r0,VRSAVE
 	stw	r0,((JB_VRSAVE)*4)(3)
 	addi	r6,r5,16
-	beq+	aligned_save_vmx
+	beq+	L(aligned_save_vmx)
 	lvsr	v0,0,r5
 	vspltisb v1,-1		/* set v1 to all 1's */
 	vspltisb v2,0		/* set v2 to all 0's */
@@ -137,9 +137,9 @@ ENTRY (BP_SYM (__sigsetjmp))
 	stvx	v5,0,r6
 	vsel	v4,v31,v4,v3
 	stvx	v4,0,r5
-	b	no_vmx
+	b	L(no_vmx)
 
-aligned_save_vmx:
+L(aligned_save_vmx):
 	stvx	20,0,r5
 	addi	r5,r5,32
 	stvx	21,0,r6
@@ -162,7 +162,7 @@ aligned_save_vmx:
 	addi	r6,r6,32
 	stvx	30,0,r5
 	stvx	31,0,r6
-no_vmx:
+L(no_vmx):
 #endif
 	b	JUMPTARGET (BP_SYM (__sigjmp_save))
 END (BP_SYM (__sigsetjmp))
diff --git a/sysdeps/powerpc/powerpc64/__longjmp-common.S b/sysdeps/powerpc/powerpc64/__longjmp-common.S
index 9d0195dd14..30087d7db4 100644
--- a/sysdeps/powerpc/powerpc64/__longjmp-common.S
+++ b/sysdeps/powerpc/powerpc64/__longjmp-common.S
@@ -53,7 +53,7 @@ ENTRY (BP_SYM (__longjmp))
 	ld   r5,0(r5) /* Load extern _dl_hwcap.  */
 # endif
 	andis.	r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
-	beq	no_vmx
+	beq	L(no_vmx)
 	la	r5,((JB_VRS)*8)(3)
 	andi.	r6,r5,0xf
 	lwz	r0,((JB_VRSAVE)*8)(3)
@@ -81,7 +81,7 @@ ENTRY (BP_SYM (__longjmp))
 	load_misaligned_vmx_lo_loaded(v30,v31,v0,r6,r5)
 	lvx	v1,0,r5
 	vperm	v31,v31,v1,v0
-	b	no_vmx
+	b	L(no_vmx)
 aligned_restore_vmx:
 	addi	r6,r5,16
 	lvx	v20,0,r5
@@ -106,7 +106,7 @@ aligned_restore_vmx:
 	addi	r6,r6,32
 	lvx	v30,0,r5
 	lvx	v31,0,r6
-no_vmx:
+L(no_vmx):
 #endif
 	ld	r1,(JB_GPR1*8)(r3)
 	ld	r2,(JB_GPR2*8)(r3)
diff --git a/sysdeps/powerpc/powerpc64/setjmp-common.S b/sysdeps/powerpc/powerpc64/setjmp-common.S
index f82d5a21f5..0de07a82d3 100644
--- a/sysdeps/powerpc/powerpc64/setjmp-common.S
+++ b/sysdeps/powerpc/powerpc64/setjmp-common.S
@@ -102,13 +102,13 @@ JUMPTARGET(GLUE(__sigsetjmp,_ent)):
 	ld   r5,0(r5) /* Load extern _dl_hwcap.  */
 # endif
 	andis.	r5,r5,(PPC_FEATURE_HAS_ALTIVEC >> 16)
-	beq	no_vmx
+	beq	L(no_vmx)
 	la	r5,((JB_VRS)*8)(3)
 	andi.	r6,r5,0xf
 	mfspr	r0,VRSAVE
 	stw	r0,((JB_VRSAVE)*8)(3)
 	addi	r6,r5,16
-	beq+	aligned_save_vmx
+	beq+	L(aligned_save_vmx)
 	lvsr	v0,0,r5
 	vspltisb v1,-1		/* set v1 to all 1's */
 	vspltisb v2,0		/* set v2 to all 0's */
@@ -150,9 +150,9 @@ JUMPTARGET(GLUE(__sigsetjmp,_ent)):
 	stvx	v5,0,r6
 	vsel	v4,v31,v4,v3
 	stvx	v4,0,r5
-	b	no_vmx
+	b	L(no_vmx)
 
-aligned_save_vmx:
+L(aligned_save_vmx):
 	stvx	20,0,r5
 	addi	r5,r5,32
 	stvx	21,0,r6
@@ -175,7 +175,7 @@ aligned_save_vmx:
 	addi	r6,r6,32
 	stvx	30,0,r5
 	stvx	31,0,r6
-no_vmx:
+L(no_vmx):
 #endif
 	b	JUMPTARGET (BP_SYM (__sigjmp_save))
 END (BP_SYM (__sigsetjmp))
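
The four powerpc setjmp/longjmp hunks only rename branch targets: no_vmx and aligned_save_vmx become L(no_vmx) and L(aligned_save_vmx), so the assembler emits them as local labels instead of exporting them as symbols in the object file. For reference, the L() helper is typically defined along these lines in glibc's sysdep.h (shown here as an assumption; the exact guards may differ):

    /* Construct an assembler-local label name; ELF local labels start
       with ".L", which keeps them out of the symbol table.  */
    #ifndef L
    # define L(name) .L##name
    #endif
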
diff --git a/sysdeps/unix/sysv/linux/powerpc/powerpc32/clone.S b/sysdeps/unix/sysv/linux/powerpc/powerpc32/clone.S
index 891cf83631..f232284747 100644
--- a/sysdeps/unix/sysv/linux/powerpc/powerpc32/clone.S
+++ b/sysdeps/unix/sysv/linux/powerpc/powerpc32/clone.S
@@ -48,7 +48,7 @@ ENTRY (BP_SYM (__clone))
 	beq-	cr0,L(badargs)
 
 	/* Set up stack frame for parent.  */
-	stwu	r1,-36(r1)
+	stwu	r1,-32(r1)
 #ifdef RESET_PID
 	stmw	r28,16(r1)
 #else
@@ -100,7 +100,7 @@ ENTRY (BP_SYM (__clone))
 #endif
 
 #ifdef RESET_PID
-	andis.	r0,r28,1	/* This is & CLONE_THREAD */
+	andis.	r0,r28,CLONE_THREAD>>16
 	bne+	r0,L(oldpid)
 	andi.	r0,r28,CLONE_VM
 	li	r3,-1
@@ -130,7 +130,7 @@ L(parent):
 	lmw	r30,16(r1)
 # endif
 #endif
-	addi	r1,r1,36
+	addi	r1,r1,32
 
 	bnslr+
 	b	JUMPTARGET(__syscall_error)
diff --git a/sysdeps/unix/sysv/linux/powerpc/powerpc64/clone.S b/sysdeps/unix/sysv/linux/powerpc/powerpc64/clone.S
index f6ce115077..287597c480 100644
--- a/sysdeps/unix/sysv/linux/powerpc/powerpc64/clone.S
+++ b/sysdeps/unix/sysv/linux/powerpc/powerpc64/clone.S
@@ -42,18 +42,17 @@ ENTRY (BP_SYM (__clone))
 
 	/* Check for child_stack == NULL || fn == NULL.  */
 	cmpdi	cr0,r4,0
-	ld	r0,0(r3)
-	cmpdi	cr1,r0,0
+	cmpdi	cr1,r3,0
 	cror	cr0*4+eq,cr1*4+eq,cr0*4+eq
 	beq-	cr0,L(badargs)
 
 	/* Set up stack frame for parent.  */
-	stdu	r1,-88(r1)
+	stdu	r1,-80(r1)
 	std	r29,56(r1)
 	std	r30,64(r1)
 	std	r31,72(r1)
 #ifdef RESET_PID
-	std	r28,80(r1)
+	std	r28,48(r1)
 #endif
 
 	/* Set up stack frame for child.  */
@@ -85,18 +84,12 @@ ENTRY (BP_SYM (__clone))
 	crandc	cr1*4+eq,cr1*4+eq,cr0*4+so
 	bne-	cr1,L(parent)		/* The '-' is to minimise the race.  */
 
-	/* On at least mklinux DR3a5, clone() doesn't actually change
-	   the stack pointer.  I'm pretty sure this is a bug, because
-	   it adds a race condition if a signal is sent to a thread
-	   just after it is created (in the previous three instructions).  */
-	mr	r1,r30
-
 #ifdef RESET_PID
-	rldicl.	r0,r28,48,63	/* This is & CLONE_THREAD */
-	bne+	r0,L(oldpid)
-	rldicl.	r0,r28,56,63	/* This is & CLONE_VM */
+	andis.	r0,r28,CLONE_THREAD>>16
+	bne+	cr0,L(oldpid)
+	andi.	r0,r28,CLONE_VM
 	li	r3,-1
-	bne-	r0,L(nomoregetpid)
+	bne-	cr0,L(nomoregetpid)
 	DO_CALL(SYS_ify(getpid))
 L(nomoregetpid):
 	stw	r3,TID(r13)
@@ -122,12 +115,12 @@ L(oldpid):
 L(parent):
 	/* Parent.  Restore registers & return.  */
 #ifdef RESET_PID
-	ld	r28,88(r1)
+	ld	r28,48(r1)
 #endif
 	ld	r31,72(r1)
 	ld	r30,64(r1)
 	ld	r29,56(r1)
-	addi	r1,r1,88
+	addi	r1,r1,80
 
 	bnslr+
 	b	JUMPTARGET(__syscall_error)
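
The clone.S changes shrink and fix the parent stack frames (the save and restore offsets for r28 now agree), drop the stale mklinux stack-pointer workaround, and test the clone flags with AND-immediate instructions: CLONE_THREAD lives above bit 15, so it needs andis. with the constant shifted right by 16, while CLONE_VM fits a plain andi.. A hedged C sketch of the child-side logic guarded by RESET_PID (field and helper names are illustrative; the real code issues the raw getpid syscall via DO_CALL):

    #define _GNU_SOURCE 1
    #include <sched.h>
    #include <sys/types.h>
    #include <unistd.h>

    /* Sketch of the PID/TID reset a freshly cloned child performs.  */
    static void
    reset_pid_sketch (int clone_flags, pid_t *tid_slot)
    {
      /* andis. r0,r28,CLONE_THREAD>>16: a new thread in the same thread
         group keeps the cached ID, so there is nothing to do.  */
      if (clone_flags & CLONE_THREAD)
        return;

      pid_t pid;
      if (clone_flags & CLONE_VM)
        /* Address space is shared: the cached value must not be reused,
           so poison it with -1 (li r3,-1).  */
        pid = -1;
      else
        /* Ordinary fork-like clone: ask the kernel (DO_CALL(getpid)).  */
        pid = getpid ();

      /* stw r3,TID(r13): store the result into the thread control block.  */
      *tid_slot = pid;
    }
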