Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/powerpc/__longjmp.S     10
-rw-r--r--  sysdeps/powerpc/add_n.S         13
-rw-r--r--  sysdeps/powerpc/addmul_1.S      11
-rw-r--r--  sysdeps/powerpc/bp-asm.h       115
-rw-r--r--  sysdeps/powerpc/bsd-_setjmp.S    9
-rw-r--r--  sysdeps/powerpc/bsd-setjmp.S    13
-rw-r--r--  sysdeps/powerpc/elf/bzero.S     20
-rw-r--r--  sysdeps/powerpc/lshift.S        12
-rw-r--r--  sysdeps/powerpc/memset.S        39
-rw-r--r--  sysdeps/powerpc/mul_1.S         11
-rw-r--r--  sysdeps/powerpc/rshift.S        11
-rw-r--r--  sysdeps/powerpc/setjmp.S        12
-rw-r--r--  sysdeps/powerpc/stpcpy.S        30
-rw-r--r--  sysdeps/powerpc/strcmp.S        21
-rw-r--r--  sysdeps/powerpc/strcpy.S        32
-rw-r--r--  sysdeps/powerpc/strlen.S         9
-rw-r--r--  sysdeps/powerpc/sub_n.S         14
-rw-r--r--  sysdeps/powerpc/submul_1.S      11
18 files changed, 330 insertions, 63 deletions
diff --git a/sysdeps/powerpc/__longjmp.S b/sysdeps/powerpc/__longjmp.S
index 7f32b22c1b..d9ee4aecfc 100644
--- a/sysdeps/powerpc/__longjmp.S
+++ b/sysdeps/powerpc/__longjmp.S
@@ -1,5 +1,5 @@
 /* longjmp for PowerPC.
-   Copyright (C) 1995, 1996, 1997, 1999 Free Software Foundation, Inc.
+   Copyright (C) 1995, 1996, 1997, 1999, 2000 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -21,8 +21,12 @@
 #define _ASM
 #define _SETJMP_H
 #include <bits/setjmp.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
+
+ENTRY (BP_SYM (__longjmp))
+	CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
 
-ENTRY (__longjmp)
 	lwz r1,(JB_GPR1*4)(r3)
 	lwz r2,(JB_GPR2*4)(r3)
 	lwz r0,(JB_LR*4)(r3)
@@ -67,4 +71,4 @@ ENTRY (__longjmp)
 	lfd fp31,((JB_FPRS+17*2)*4)(r3)
 	mr r3,r4
 	blr
-END (__longjmp)
+END (BP_SYM (__longjmp))
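
The check added at the top of __longjmp validates the whole jmp_buf before any register is reloaded from it; __sigsetjmp further down gets the same treatment. A rough C model of what CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE) enforces, assuming the three-word descriptor laid out in bp-asm.h later in this diff (value, low bound, high bound at offsets 0, 4, 8) and using abort () in place of the conditional traps:

  #include <stdlib.h>

  struct bp { char *value; char *low; char *high; };  /* layout per bp-asm.h */

  /* The object of `size' bytes at value must lie within [low, high];
     twllt/twlgt trap otherwise.  The plain pointer is what the rest of
     the routine then uses.  */
  static char *check_bounds_wide_lit (const struct bp *bp, size_t size)
  {
    if (bp->value < bp->low || bp->value > bp->high - size)
      abort ();
    return bp->value;
  }

For __longjmp the literal length is JB_SIZE, the byte size of the jmp_buf.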
diff --git a/sysdeps/powerpc/add_n.S b/sysdeps/powerpc/add_n.S
index 7b683b7be4..468d422621 100644
--- a/sysdeps/powerpc/add_n.S
+++ b/sysdeps/powerpc/add_n.S
@@ -18,6 +18,8 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_add_n (mp_ptr res_ptr, mp_srcptr s1_ptr, mp_srcptr s2_ptr,
                         mp_size_t size)
@@ -27,7 +29,14 @@
    possible 2-unrolled inner loop will not be.  Also, watch out for the
    alignment...  */
 
-EALIGN(__mpn_add_n,3,0)
+EALIGN (BP_SYM (__mpn_add_n), 3, 0)
+
+#if __BOUNDED_POINTERS__
+	slwi r10,r6,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r5, r8, r9, r10)
+#endif
 /* Set up for loop below.  */
 	mtcrf 0x01,r6
 	srwi. r7,r6,1
@@ -65,4 +74,4 @@ L(0):	lwz  r9,4(r4)
 /* Return the carry.  */
 L(1):	addze r3,r10
 	blr
-END(__mpn_add_n)
+END (BP_SYM (__mpn_add_n))
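
Every mpn routine in this patch (add_n, addmul_1, mul_1, lshift, rshift, sub_n, submul_1) gains the same preamble: slwi scales the limb count by four to get a byte length, and each pointer argument is checked over that range with CHECK_BOUNDS_BOTH_WIDE. For reference, the operation being guarded here, as a plain C sketch rather than the scheduled, unrolled asm:

  #include <stdint.h>

  typedef uint32_t mp_limb_t;   /* a 32-bit limb, as on 32-bit PowerPC */

  /* Semantic sketch of mpn_add_n: res[i] = s1[i] + s2[i] + carry over
     `size' limbs, returning the carry out of the top limb.  */
  static mp_limb_t mpn_add_n_ref (mp_limb_t *res, const mp_limb_t *s1,
                                  const mp_limb_t *s2, long size)
  {
    mp_limb_t carry = 0;
    for (long i = 0; i < size; i++)
      {
        mp_limb_t a = s1[i];
        mp_limb_t sum = a + s2[i] + carry;
        carry = sum < a || (carry && sum == a);   /* 32-bit wraparound */
        res[i] = sum;
      }
    return carry;
  }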
diff --git a/sysdeps/powerpc/addmul_1.S b/sysdeps/powerpc/addmul_1.S
index 2ce4fa2979..178abf8115 100644
--- a/sysdeps/powerpc/addmul_1.S
+++ b/sysdeps/powerpc/addmul_1.S
@@ -18,11 +18,18 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_addmul_1 (mp_ptr res_ptr, mp_srcptr s1_ptr,
                            mp_size_t s1_size, mp_limb_t s2_limb)
    Calculate res+s1*s2 and put result back in res; return carry.  */
-ENTRY(__mpn_addmul_1)
+ENTRY (BP_SYM (__mpn_addmul_1))
+#if __BOUNDED_POINTERS__
+	slwi r10,r5,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+#endif
 	mtctr	r5
 
 	lwz	r0,0(r4)
@@ -46,4 +53,4 @@ L(0):	lwzu	r0,4(r4)
 L(1):	stw	r8,4(r3)
 	addze	r3,r10
 	blr
-END(__mpn_addmul_1)
+END (BP_SYM (__mpn_addmul_1))
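
The multiply-accumulate variant differs from add_n only in the inner operation; the bounds preamble is identical. A C sketch of what mpn_addmul_1 computes (the asm forms the 64-bit product with mullw/mulhwu; a uint64_t stands in here):

  #include <stdint.h>

  /* res[i] += s1[i] * limb with carry propagation; return the final carry.  */
  static uint32_t mpn_addmul_1_ref (uint32_t *res, const uint32_t *s1,
                                    long size, uint32_t limb)
  {
    uint32_t carry = 0;
    for (long i = 0; i < size; i++)
      {
        uint64_t t = (uint64_t) s1[i] * limb + res[i] + carry;
        res[i] = (uint32_t) t;
        carry = (uint32_t) (t >> 32);
      }
    return carry;
  }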
diff --git a/sysdeps/powerpc/bp-asm.h b/sysdeps/powerpc/bp-asm.h
new file mode 100644
index 0000000000..e7a364007c
--- /dev/null
+++ b/sysdeps/powerpc/bp-asm.h
@@ -0,0 +1,115 @@
+/* Bounded-pointer definitions for PowerPC assembler.
+   Copyright (C) 2000 Free Software Foundation, Inc.
+   Contributed by Greg McGary <greg@mcgary.org>
+
+   This file is part of the GNU C Library.  Its master source is NOT part of
+   the C library, however.  The master source lives in the GNU MP Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Library General Public License as
+   published by the Free Software Foundation; either version 2 of the
+   License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Library General Public License for more details.
+
+   You should have received a copy of the GNU Library General Public
+   License along with the GNU C Library; see the file COPYING.LIB.  If not,
+   write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+   Boston, MA 02111-1307, USA.  */
+
+#if __BOUNDED_POINTERS__
+
+/* Byte offsets of BP components.  */
+# define oVALUE	0
+# define oLOW	4
+# define oHIGH	8
+
+/* Don't check bounds, just convert the BP register to its simple
+   pointer value.  */
+
+# define UNCHECK_BOUNDS(rBP)			\
+	lwz	rBP, oVALUE(rBP)
+
+/* Check low bound, with the side effect that the BP register is converted
+   to its simple pointer value.  Move the high bound into a register for
+   later use.  */
+
+# define CHECK_BOUNDS_LOW(rBP, rLOW, rHIGH)	\
+	lwz	rHIGH, oHIGH(rBP);		\
+	lwz	rLOW, oLOW(rBP);		\
+	lwz	rBP, oVALUE(rBP);		\
+	twllt	rBP, rLOW
+
+/* Check the high bound, which is in a register, using the given
+   conditional trap instruction.  */
+
+# define CHECK_BOUNDS_HIGH(rVALUE, rHIGH, TWLcc) \
+	TWLcc	rVALUE, rHIGH
+
+/* Check the high bound, which is stored at the return-value's high
+   bound slot, using the given conditional trap instruction.  */
+
+# define CHECK_BOUNDS_HIGH_RTN(rVALUE, rHIGH, TWLcc)	\
+	lwz	rHIGH, oHIGH(rRTN);			\
+	TWLcc	rVALUE, rHIGH
+
+/* Check both bounds, with the side effect that the BP register is
+   converted to its simple pointer value.  */
+
+# define CHECK_BOUNDS_BOTH(rBP, rLOW, rHIGH)	\
+	CHECK_BOUNDS_LOW(rBP, rLOW, rHIGH);	\
+	twlge	rBP, rHIGH
+
+/* Check bounds on a memory region of given length, with the side
+   effect that the BP register is converted to its simple pointer
+   value.  */
+
+# define CHECK_BOUNDS_BOTH_WIDE(rBP, rLOW, rHIGH, rLENGTH)	\
+	CHECK_BOUNDS_LOW (rBP, rLOW, rHIGH);			\
+	sub	rHIGH, rHIGH, rLENGTH;				\
+	twlgt	rBP, rHIGH
+
+# define CHECK_BOUNDS_BOTH_WIDE_LIT(rBP, rLOW, rHIGH, LENGTH)	\
+	CHECK_BOUNDS_LOW (rBP, rLOW, rHIGH);			\
+	subi	rHIGH, rHIGH, LENGTH;				\
+	twlgt	rBP, rHIGH
+
+/* Store a pointer value register into the return-value's pointer
+   value slot.  */
+
+# define STORE_RETURN_VALUE(rVALUE)		\
+	stw	rVALUE, oVALUE(rRTN)
+
+/* Store the low and high bounds into the return-value's pointer bounds
+   slots.  */
+
+# define STORE_RETURN_BOUNDS(rLOW, rHIGH)	\
+	stw	rLOW, oLOW(rRTN);		\
+	stw	rHIGH, oHIGH(rRTN)
+
+/* Stuff zero value/low/high into the BP addressed by rRTN.  */
+
+# define RETURN_NULL_BOUNDED_POINTER		\
+	li	r4, 0;				\
+	STORE_RETURN_VALUE (r4);		\
+	STORE_RETURN_BOUNDS (r4, r4)
+
+#else
+
+# define UNCHECK_BOUNDS(rBP)
+# define CHECK_BOUNDS_LOW(rBP, rLOW, rHIGH)
+# define CHECK_BOUNDS_HIGH(rVALUE, rHIGH, TWLcc)
+# define CHECK_BOUNDS_HIGH_RTN(rVALUE, rHIGH, TWLcc)
+# define CHECK_BOUNDS_BOTH(rBP, rLOW, rHIGH)
+# define CHECK_BOUNDS_BOTH_WIDE(rBP, rLOW, rHIGH, rLENGTH)
+# define CHECK_BOUNDS_BOTH_WIDE_LIT(rBP, rLOW, rHIGH, LENGTH)
+# define STORE_RETURN_VALUE(rVALUE)
+# define STORE_RETURN_BOUNDS(rLOW, rHIGH)
+
+# define RETURN_NULL_BOUNDED_POINTER li rRTN, 0
+
+#endif
+
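
For readers new to the bounded-pointer build, a compact C picture of the object these macros manipulate and the conditions they enforce. This is only an illustration: the layout comes from the oVALUE/oLOW/oHIGH offsets above, and abort () stands in for the tw* conditional traps.

  #include <stdlib.h>

  /* The three-word bounded pointer addressed via oVALUE/oLOW/oHIGH.  */
  struct bp { char *value; char *low; char *high; };

  /* CHECK_BOUNDS_LOW: fetch all three words, trap if value < low, and
     leave the plain pointer behind for the rest of the routine.  */
  static char *check_low (const struct bp *p, char **high_out)
  {
    *high_out = p->high;
    if (p->value < p->low)
      abort ();
    return p->value;
  }

  /* CHECK_BOUNDS_BOTH: additionally trap if value >= high.  */
  static char *check_both (const struct bp *p)
  {
    char *high;
    char *v = check_low (p, &high);
    if (v >= high)
      abort ();
    return v;
  }

  /* CHECK_BOUNDS_BOTH_WIDE(_LIT): the access spans `len' bytes, so the
     upper limit becomes high - len.  */
  static char *check_both_wide (const struct bp *p, size_t len)
  {
    char *high;
    char *v = check_low (p, &high);
    if (v > high - len)
      abort ();
    return v;
  }

  /* RETURN_NULL_BOUNDED_POINTER: zero the caller's return descriptor,
     which STORE_RETURN_VALUE/STORE_RETURN_BOUNDS otherwise fill in.  */
  static void return_null_bp (struct bp *rtn)
  {
    rtn->value = rtn->low = rtn->high = 0;
  }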
diff --git a/sysdeps/powerpc/bsd-_setjmp.S b/sysdeps/powerpc/bsd-_setjmp.S
index bb9b6cffce..2cf93ccb73 100644
--- a/sysdeps/powerpc/bsd-_setjmp.S
+++ b/sysdeps/powerpc/bsd-_setjmp.S
@@ -1,5 +1,5 @@
 /* BSD `_setjmp' entry point to `sigsetjmp (..., 0)'.  PowerPC version.
-   Copyright (C) 1994, 1997, 1999 Free Software Foundation, Inc.
+   Copyright (C) 1994, 1997, 1999, 2000 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -22,8 +22,9 @@
    in setjmp doesn't clobber the state restored by longjmp.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
 
-ENTRY (_setjmp)
+ENTRY (BP_SYM (_setjmp))
 	li r4,0			/* Set second argument to 0.  */
-	b JUMPTARGET(__sigsetjmp)
-END (_setjmp)
+	b JUMPTARGET(BP_SYM (__sigsetjmp))
+END (BP_SYM (_setjmp))
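
The stub body only zeroes the second argument register and branches to __sigsetjmp, so _setjmp (env) behaves like sigsetjmp (env, 0): no signal mask is saved or restored. A small usage sketch in plain C, assuming the BSD _setjmp/_longjmp interfaces are exposed (they are by default with glibc):

  #include <setjmp.h>
  #include <stdio.h>

  static jmp_buf env;

  static void bail_out (void)
  {
    _longjmp (env, 1);            /* restores registers, not the signal mask */
  }

  int main (void)
  {
    if (_setjmp (env) == 0)       /* the stub above: __sigsetjmp (env, 0) */
      bail_out ();
    else
      puts ("back via _longjmp");
    return 0;
  }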
diff --git a/sysdeps/powerpc/bsd-setjmp.S b/sysdeps/powerpc/bsd-setjmp.S
index 29e6c7a043..c900a7b8d1 100644
--- a/sysdeps/powerpc/bsd-setjmp.S
+++ b/sysdeps/powerpc/bsd-setjmp.S
@@ -1,5 +1,5 @@
 /* BSD `setjmp' entry point to `sigsetjmp (..., 1)'.  PowerPC version.
-   Copyright (C) 1994, 1997, 1999 Free Software Foundation, Inc.
+   Copyright (C) 1994, 1997, 1999, 2000 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -22,11 +22,12 @@
    in setjmp doesn't clobber the state restored by longjmp.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
 
-ENTRY (__setjmp)
+ENTRY (BP_SYM (__setjmp))
 	li r4,1			/* Set second argument to 1.  */
-	b JUMPTARGET(__sigsetjmp)
-END (__setjmp)
+	b JUMPTARGET(BP_SYM (__sigsetjmp))
+END (BP_SYM (__setjmp))
 
-	.globl setjmp
-	.set setjmp,__setjmp
+	.globl BP_SYM (setjmp)
+	.set BP_SYM (setjmp), BP_SYM (__setjmp)
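
The closing .globl/.set pair publishes setjmp as a second name for __setjmp. The C-level counterpart of that aliasing is GCC's alias attribute; an illustrative fragment with made-up names, not glibc code:

  /* `.set new, old' at the assembly level corresponds to an alias in C.  */
  int work_impl (int x) { return x + 1; }                    /* hypothetical */
  int work (int x) __attribute__ ((alias ("work_impl")));    /* `work' == `work_impl' */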
diff --git a/sysdeps/powerpc/elf/bzero.S b/sysdeps/powerpc/elf/bzero.S
index f914123563..511c89b596 100644
--- a/sysdeps/powerpc/elf/bzero.S
+++ b/sysdeps/powerpc/elf/bzero.S
@@ -1,5 +1,5 @@
 /* Optimized bzero `implementation' for PowerPC.
-   Copyright (C) 1997, 1999 Free Software Foundation, Inc.
+   Copyright (C) 1997, 1999, 2000 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -18,10 +18,20 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
 
-ENTRY(__bzero)
+ENTRY (BP_SYM (__bzero))
+
+#if __BOUNDED_POINTERS__
+	mr	r6,r4
+	li	r5,0
+	mr	r4,r3
+	/* Tell memset that we don't want a return value.  */
+	li	r3,0
+#else
 	mr	r5,r4
 	li	r4,0
-	b	memset@local
-END(__bzero)
-weak_alias (__bzero, bzero)
+#endif
+	b	BP_SYM (memset)@local
+END (BP_SYM (__bzero))
+weak_alias (BP_SYM (__bzero), BP_SYM (bzero))
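
Without bounded pointers this is the familiar forwarding stub: bzero (s, n) becomes memset (s, 0, n). Under __BOUNDED_POINTERS__ the extra moves account for memset's new leading return-value descriptor (see the memset change below); passing 0 there tells memset that no return value is wanted, since bzero returns void. Semantically:

  #include <string.h>

  /* What __bzero forwards to, ignoring the bounded-pointer plumbing.  */
  static void bzero_ref (void *s, size_t n)
  {
    memset (s, 0, n);     /* fill byte fixed at 0; return value unused */
  }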
diff --git a/sysdeps/powerpc/lshift.S b/sysdeps/powerpc/lshift.S
index 832ee08fe9..02b36f7dda 100644
--- a/sysdeps/powerpc/lshift.S
+++ b/sysdeps/powerpc/lshift.S
@@ -18,11 +18,19 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_lshift (mp_ptr wp, mp_srcptr up, mp_size_t usize,
   			 unsigned int cnt)  */
 
-EALIGN(__mpn_lshift,3,0)
+EALIGN (BP_SYM (__mpn_lshift), 3, 0)
+
+#if __BOUNDED_POINTERS__
+	slwi r10,r5,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+#endif
 	mtctr	r5		# copy size into CTR
 	cmplwi	cr0,r5,16	# is size < 16
 	slwi	r0,r5,2
@@ -120,4 +128,4 @@ L(n):	lwzu	r10,-4(r4);						\
 	DO_LSHIFT(30)
 	DO_LSHIFT(31)
 
-END(__mpn_lshift)
+END (BP_SYM (__mpn_lshift))
diff --git a/sysdeps/powerpc/memset.S b/sysdeps/powerpc/memset.S
index c48c0af7c8..2a09c24454 100644
--- a/sysdeps/powerpc/memset.S
+++ b/sysdeps/powerpc/memset.S
@@ -18,6 +18,8 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* __ptr_t [r3] memset (__ptr_t s [r3], int c [r4], size_t n [r5]));
    Returns 's'.
@@ -26,13 +28,21 @@
    cache line (256 bits). There is a special case for setting cache lines
    to 0, to take advantage of the dcbz instruction.  */
 
-EALIGN (memset, 5, 1)
+EALIGN (BP_SYM (memset), 5, 1)
 
 #define rTMP	r0
-#define	rRTN	r3	/* initial value of 1st argument */
-#define rCHR	r4	/* char to set in each byte */
-#define rLEN	r5	/* length of region to set */
-#define rMEMP	r6	/* address at which we are storing */
+#define rRTN	r3	/* initial value of 1st argument */
+#if __BOUNDED_POINTERS__
+# define rMEMP0	r4	/* original value of 1st arg */
+# define rCHR	r5	/* char to set in each byte */
+# define rLEN	r6	/* length of region to set */
+# define rMEMP	r10	/* address at which we are storing */
+#else
+# define rMEMP0	r3	/* original value of 1st arg */
+# define rCHR	r4	/* char to set in each byte */
+# define rLEN	r5	/* length of region to set */
+# define rMEMP	r6	/* address at which we are storing */
+#endif
 #define rALIGN	r7	/* number of bytes we are setting now (when aligning) */
 #define rMEMP2	r8
 
@@ -40,21 +50,30 @@ EALIGN (memset, 5, 1)
 #define rNEG64	r8	/* constant -64 for clearing with dcbz */
 #define rNEG32	r9	/* constant -32 for clearing with dcbz */
 
+#if __BOUNDED_POINTERS__
+	cmplwi	cr1, rRTN, 0
+	CHECK_BOUNDS_BOTH_WIDE (rMEMP0, rTMP, rTMP2, rLEN)
+	beq	cr1, L(b0)
+	STORE_RETURN_VALUE (rMEMP0)
+	STORE_RETURN_BOUNDS (rTMP, rTMP2)
+L(b0):
+#endif
+
 /* take care of case for size <= 4  */
 	cmplwi	cr1, rLEN, 4
-	andi.	rALIGN, rRTN, 3
-	mr	rMEMP, rRTN
+	andi.	rALIGN, rMEMP0, 3
+	mr	rMEMP, rMEMP0
 	ble-	cr1, L(small)
 /* align to word boundary  */
 	cmplwi	cr5, rLEN, 31
 	rlwimi	rCHR, rCHR, 8, 16, 23
 	beq+	L(aligned)	/* 8th instruction from .align */
-	mtcrf	0x01, rRTN
+	mtcrf	0x01, rMEMP0
 	subfic	rALIGN, rALIGN, 4
 	add	rMEMP, rMEMP, rALIGN
 	sub	rLEN, rLEN, rALIGN
 	bf+	31, L(g0)
-	stb	rCHR, 0(rRTN)
+	stb	rCHR, 0(rMEMP0)
 	bt	30, L(aligned)
 L(g0):	sth	rCHR, -2(rMEMP)	/* 16th instruction from .align */
 /* take care of case for size < 31 */
@@ -207,4 +226,4 @@ L(medium_28t):
 	stw	rCHR, -4(rMEMP)
 	stw	rCHR, -8(rMEMP)
 	blr
-END(memset)
+END (BP_SYM (memset))
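
Under __BOUNDED_POINTERS__ the argument registers shift by one: r3 now carries a pointer to the caller's return-value descriptor (or 0, as passed by __bzero above), and the destination descriptor, fill byte and length follow in r4 through r6, which is why rMEMP moves to r10. A rough C rendering of the added prologue, reusing the struct bp sketch from the bp-asm.h note; the exact bounds stored back are simplified:

  #include <stdlib.h>

  struct bp { char *value; char *low; char *high; };

  /* Prologue only: range-check the destination over `n' bytes and, if a
     return descriptor was supplied, populate it (beq cr1, L(b0) skips this
     when r3 == 0, i.e. the __bzero path).  The plain pointer is what the
     rest of memset stores through.  */
  static char *memset_bp_prologue (struct bp *ret, const struct bp *dest, size_t n)
  {
    if (dest->value < dest->low || dest->value > dest->high - n)
      abort ();                          /* CHECK_BOUNDS_BOTH_WIDE */
    if (ret != NULL)
      *ret = *dest;                      /* STORE_RETURN_VALUE / _BOUNDS */
    return dest->value;
  }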
diff --git a/sysdeps/powerpc/mul_1.S b/sysdeps/powerpc/mul_1.S
index 52565a6547..37e7ab7f62 100644
--- a/sysdeps/powerpc/mul_1.S
+++ b/sysdeps/powerpc/mul_1.S
@@ -18,12 +18,19 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_mul_1 (mp_ptr res_ptr, mp_srcptr s1_ptr,
                         mp_size_t s1_size, mp_limb_t s2_limb)
    Calculate s1*s2 and put result in res_ptr; return carry.  */
 
-ENTRY(__mpn_mul_1)
+ENTRY (BP_SYM (__mpn_mul_1))
+#if __BOUNDED_POINTERS__
+	slwi r10,r5,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+#endif
 	mtctr	r5
 
 	lwz	r0,0(r4)
@@ -43,4 +50,4 @@ L(0):	lwzu	r0,4(r4)
 L(1):	stw	r7,4(r3)
 	addze	r3,r10
 	blr
-END(__mpn_mul_1)
+END (BP_SYM (__mpn_mul_1))
diff --git a/sysdeps/powerpc/rshift.S b/sysdeps/powerpc/rshift.S
index a02fb2555c..944227d244 100644
--- a/sysdeps/powerpc/rshift.S
+++ b/sysdeps/powerpc/rshift.S
@@ -18,6 +18,8 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* INPUT PARAMETERS
    res_ptr	r3
@@ -25,7 +27,12 @@
    size		r5
    cnt		r6  */
 
-ENTRY(__mpn_rshift)
+ENTRY (BP_SYM (__mpn_rshift))
+#if __BOUNDED_POINTERS__
+	slwi r10,r5,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+#endif
 	mtctr	r5		# copy size into CTR
 	addi	r7,r3,-4	# move adjusted res_ptr to free return reg
 	subfic	r8,r6,32
@@ -53,4 +60,4 @@ L(1):	srw	r0,r11,r6
 L(2):	srw	r0,r10,r6
 	stw	r0,4(r7)
 	blr
-END(__mpn_rshift)
+END (BP_SYM (__mpn_rshift))
diff --git a/sysdeps/powerpc/setjmp.S b/sysdeps/powerpc/setjmp.S
index cb95a58f2c..6c49e93d4f 100644
--- a/sysdeps/powerpc/setjmp.S
+++ b/sysdeps/powerpc/setjmp.S
@@ -1,5 +1,5 @@
 /* setjmp for PowerPC.
-   Copyright (C) 1995, 1996, 1997, 1999 Free Software Foundation, Inc.
+   Copyright (C) 1995, 1996, 1997, 1999, 2000 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
 
    The GNU C Library is free software; you can redistribute it and/or
@@ -21,8 +21,12 @@
 #define _ASM
 #define _SETJMP_H
 #include <bits/setjmp.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
+
+ENTRY (BP_SYM (__sigsetjmp))
+	CHECK_BOUNDS_BOTH_WIDE_LIT (r3, r8, r9, JB_SIZE)
 
-ENTRY (__sigsetjmp)
 	stw  r1,(JB_GPR1*4)(3)
 	mflr r0
 	stw  r2,(JB_GPR2*4)(3)
@@ -65,5 +69,5 @@ ENTRY (__sigsetjmp)
 	stfd fp30,((JB_FPRS+16*2)*4)(3)
 	stw  r31,((JB_GPRS+17)*4)(3)
 	stfd fp31,((JB_FPRS+17*2)*4)(3)
-	b JUMPTARGET(__sigjmp_save)
-END (__sigsetjmp)
+	b JUMPTARGET (BP_SYM (__sigjmp_save))
+END (BP_SYM (__sigsetjmp))
diff --git a/sysdeps/powerpc/stpcpy.S b/sysdeps/powerpc/stpcpy.S
index 2dcae0738d..9a4ab0b432 100644
--- a/sysdeps/powerpc/stpcpy.S
+++ b/sysdeps/powerpc/stpcpy.S
@@ -18,22 +18,36 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* See strlen.s for comments on how the end-of-string testing works.  */
 
 /* char * [r3] stpcpy (char *dest [r3], const char *src [r4])  */
 
-EALIGN (__stpcpy, 4, 0)
+EALIGN (BP_SYM (__stpcpy), 4, 0)
 
 #define rTMP	r0
-#define rDEST	r3		/* pointer to previous word in dest */
-#define rSRC	r4		/* pointer to previous word in src */
+#define rRTN	r3
+#if __BOUNDED_POINTERS__
+# define rDEST	r4		/* pointer to previous word in dest */
+# define rSRC	r5		/* pointer to previous word in src */
+# define rLOW	r11
+# define rHIGH	r12
+#else
+# define rDEST	r3		/* pointer to previous word in dest */
+# define rSRC	r4		/* pointer to previous word in src */
+#endif
 #define rWORD	r6		/* current word from src */
 #define rFEFE	r7		/* 0xfefefeff */
 #define r7F7F	r8		/* 0x7f7f7f7f */
 #define rNEG	r9		/* ~(word in src | 0x7f7f7f7f) */
 #define rALT	r10		/* alternate word from src */
 
+	CHECK_BOUNDS_LOW (rSRC, rLOW, rHIGH)
+	CHECK_BOUNDS_LOW (rDEST, rLOW, rHIGH)
+	STORE_RETURN_BOUNDS (rLOW, rHIGH)
+
 	or	rTMP, rSRC, rDEST
 	clrlwi.	rTMP, rTMP, 30
 	addi	rDEST, rDEST, -4
@@ -71,6 +85,8 @@ L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
 	stbu	rTMP, 1(rDEST)
 	beqlr-
 	stbu	rALT, 1(rDEST)
+	CHECK_BOUNDS_HIGH (rDEST, rHIGH, twlgt)
+	STORE_RETURN_VALUE (rDEST)
 	blr
 
 /* Oh well.  In this case, we just do a byte-by-byte copy.  */
@@ -92,9 +108,13 @@ L(u0):	lbzu	rALT, 1(rSRC)
 	cmpwi	rWORD, 0
 	bne+	L(u0)
 L(u2):	stbu	rWORD, 1(rDEST)
+	CHECK_BOUNDS_HIGH (rDEST, rHIGH, twlgt)
+	STORE_RETURN_VALUE (rDEST)
 	blr
 L(u1):	stbu	rALT, 1(rDEST)
+	CHECK_BOUNDS_HIGH (rDEST, rHIGH, twlgt)
+	STORE_RETURN_VALUE (rDEST)
 	blr
-END (__stpcpy)
+END (BP_SYM (__stpcpy))
 
-weak_alias (__stpcpy, stpcpy)
+weak_alias (BP_SYM (__stpcpy), BP_SYM (stpcpy))
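
Unlike strcpy, stpcpy returns a pointer to the terminating '\0' it just wrote, which is why the bounded version checks the high bound and stores the return value on each of the three exit paths instead of once at entry. Reference semantics in C (the asm copies a word at a time whenever both pointers are word-aligned):

  /* Copy src, including its terminating '\0', to dest; return a pointer
     to dest's terminating '\0'.  This is the value the asm range-checks
     with CHECK_BOUNDS_HIGH before returning.  */
  static char *stpcpy_ref (char *dest, const char *src)
  {
    while ((*dest = *src++) != '\0')
      dest++;
    return dest;
  }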
diff --git a/sysdeps/powerpc/strcmp.S b/sysdeps/powerpc/strcmp.S
index 1accdd70cb..fb7b9f994a 100644
--- a/sysdeps/powerpc/strcmp.S
+++ b/sysdeps/powerpc/strcmp.S
@@ -18,24 +18,33 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* See strlen.s for comments on how the end-of-string testing works.  */
 
 /* int [r3] strcmp (const char *s1 [r3], const char *s2 [r4])  */
 
-EALIGN (strcmp, 4, 0)
+EALIGN (BP_SYM (strcmp), 4, 0)
 
 #define rTMP	r0
-#define rRTN	r3	/* return value */
+#define rRTN	r3
 #define rSTR1	r3	/* first string arg */
 #define rSTR2	r4	/* second string arg */
+#if __BOUNDED_POINTERS__
+# define rHIGH1	r11
+# define rHIGH2 r12
+#endif
 #define rWORD1	r5	/* current word in s1 */
 #define rWORD2	r6	/* current word in s2 */
 #define rFEFE	r7	/* constant 0xfefefeff (-0x01010101) */
-#define	r7F7F	r8	/* constant 0x7f7f7f7f */
+#define r7F7F	r8	/* constant 0x7f7f7f7f */
 #define rNEG	r9	/* ~(word in s1 | 0x7f7f7f7f) */
 #define rBITDIF	r10	/* bits that differ in s1 & s2 words */
 
+	CHECK_BOUNDS_LOW (rSTR1, rTMP, rHIGH1)
+	CHECK_BOUNDS_LOW (rSTR2, rTMP, rHIGH2)
+
 	or	rTMP, rSTR2, rSTR1
 	clrlwi.	rTMP, rTMP, 30
 	lis	rFEFE, -0x101
@@ -74,6 +83,7 @@ L(endstring):
 	bgelr+	cr1
 L(equal):
 	li	rRTN, 0
+	/* GKM FIXME: check high bounds.  */
 	blr
 
 L(different):
@@ -83,6 +93,7 @@ L(different):
 	bgelr+
 L(highbit):
 	ori	rRTN, rWORD2, 1
+	/* GKM FIXME: check high bounds.  */
 	blr
 
 
@@ -106,8 +117,10 @@ L(u1):	cmpwi	cr1, rWORD1, 0
 	cmpw	rWORD1, rWORD2
 	bne+	cr1, L(u0)
 L(u3):	sub	rRTN, rWORD1, rWORD2
+	/* GKM FIXME: check high bounds.  */
 	blr
 L(u4):	lbz	rWORD1, -1(rSTR1)
 	sub	rRTN, rWORD1, rWORD2
+	/* GKM FIXME: check high bounds.  */
 	blr
-END(strcmp)
+END (BP_SYM (strcmp))
diff --git a/sysdeps/powerpc/strcpy.S b/sysdeps/powerpc/strcpy.S
index 901ccf1259..122cf59be7 100644
--- a/sysdeps/powerpc/strcpy.S
+++ b/sysdeps/powerpc/strcpy.S
@@ -18,26 +18,43 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* See strlen.s for comments on how the end-of-string testing works.  */
 
 /* char * [r3] strcpy (char *dest [r3], const char *src [r4])  */
 
-EALIGN(strcpy, 4, 0)
+EALIGN (BP_SYM (strcpy), 4, 0)
 
 #define rTMP	r0
 #define rRTN	r3	/* incoming DEST arg preserved as result */
-#define rSRC	r4	/* pointer to previous word in src */
-#define rDEST	r5	/* pointer to previous word in dest */
-#define	rWORD	r6	/* current word from src */
+#if __BOUNDED_POINTERS__
+# define rDEST	r4	/* pointer to previous word in dest */
+# define rSRC	r5	/* pointer to previous word in src */
+# define rLOW	r11
+# define rHIGH	r12
+#else
+# define rSRC	r4	/* pointer to previous word in src */
+# define rDEST	r5	/* pointer to previous word in dest */
+#endif
+#define rWORD	r6	/* current word from src */
 #define rFEFE	r7	/* constant 0xfefefeff (-0x01010101) */
-#define	r7F7F	r8	/* constant 0x7f7f7f7f */
+#define r7F7F	r8	/* constant 0x7f7f7f7f */
 #define rNEG	r9	/* ~(word in s1 | 0x7f7f7f7f) */
 #define rALT	r10	/* alternate word from src */
 
+	CHECK_BOUNDS_LOW (rSRC, rLOW, rHIGH)
+	CHECK_BOUNDS_LOW (rDEST, rLOW, rHIGH)
+	STORE_RETURN_BOUNDS (rLOW, rHIGH)
+
 	or	rTMP, rSRC, rRTN
 	clrlwi.	rTMP, rTMP, 30
+#if __BOUNDED_POINTERS__
+	addi	rDEST, rDEST, -4
+#else
 	addi	rDEST, rRTN, -4
+#endif
 	bne	L(unaligned)
 
 	lis	rFEFE, -0x101
@@ -72,6 +89,7 @@ L(g1):	rlwinm.	rTMP, rALT, 8, 24, 31
 	stb	rTMP, 6(rDEST)
 	beqlr-
 	stb	rALT, 7(rDEST)
+	/* GKM FIXME: check high bound.  */
 	blr
 
 /* Oh well.  In this case, we just do a byte-by-byte copy.  */
@@ -93,8 +111,10 @@ L(u0):	lbzu	rALT, 1(rSRC)
 	cmpwi	rWORD, 0
 	bne+	L(u0)
 L(u2):	stb	rWORD, 1(rDEST)
+	/* GKM FIXME: check high bound.  */
 	blr
 L(u1):	stb	rALT, 1(rDEST)
+	/* GKM FIXME: check high bound.  */
 	blr
 
-END(strcpy)
+END (BP_SYM (strcpy))
diff --git a/sysdeps/powerpc/strlen.S b/sysdeps/powerpc/strlen.S
index 18e76238c0..dc76333cbd 100644
--- a/sysdeps/powerpc/strlen.S
+++ b/sysdeps/powerpc/strlen.S
@@ -18,6 +18,8 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* The algorithm here uses the following techniques:
 
@@ -71,7 +73,7 @@
 
 /* int [r3] strlen (char *s [r3])  */
 
-ENTRY (strlen)
+ENTRY (BP_SYM (strlen))
 
 #define rTMP1	r0
 #define rRTN	r3	/* incoming STR arg, outgoing result */
@@ -87,6 +89,8 @@ ENTRY (strlen)
 #define rTMP3	r11
 #define rTMP4	r12
 
+	CHECK_BOUNDS_LOW (rRTN, rTMP1, rTMP2)
+
 	clrrwi	rSTR, rRTN, 2
 	lis	r7F7F, 0x7f7f
 	rlwinm	rPADN, rRTN, 3, 27, 28
@@ -150,5 +154,6 @@ L(done0):
 	subf	rTMP1, rRTN, rSTR
 	srwi	rTMP3, rTMP3, 3
 	add	rRTN, rTMP1, rTMP3
+	/* GKM FIXME: check high bound.  */
 	blr
-END (strlen)
+END (BP_SYM (strlen))
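
Several of the files above defer to strlen.S for how the end of a string is found a word at a time. The test is built from the two constants loaded here, rFEFE = 0xfefefeff (that is, -0x01010101) and r7F7F = 0x7f7f7f7f: a word w contains a zero byte exactly when (w + 0xfefefeff) & ~(w | 0x7f7f7f7f) is nonzero. As C:

  #include <stdint.h>

  /* Nonzero iff some byte of w is zero; the same pair of constants drives
     the inner loops of strcmp, strcpy and stpcpy above.  */
  static int word_has_zero_byte (uint32_t w)
  {
    return ((w + 0xfefefeffU) & ~(w | 0x7f7f7f7fU)) != 0;
  }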
diff --git a/sysdeps/powerpc/sub_n.S b/sysdeps/powerpc/sub_n.S
index 244ee4b52f..b725ca98db 100644
--- a/sysdeps/powerpc/sub_n.S
+++ b/sysdeps/powerpc/sub_n.S
@@ -18,6 +18,8 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_sub_n (mp_ptr res_ptr, mp_srcptr s1_ptr, mp_srcptr s2_ptr,
                         mp_size_t size)
@@ -27,7 +29,15 @@
    possible 2-unrolled inner loop will not be.  Also, watch out for the
    alignment...  */
 
-EALIGN(__mpn_sub_n,3,1)
+EALIGN (BP_SYM (__mpn_sub_n), 3, 1)
+
+#if __BOUNDED_POINTERS__
+	slwi r10,r6,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r5, r8, r9, r10)
+#endif
+
 /* Set up for loop below.  */
 	mtcrf 0x01,r6
 	srwi. r7,r6,1
@@ -65,4 +75,4 @@ L(0):
 L(1):	subfe r3,r3,r3
 	neg   r3,r3
 	blr
-END(__mpn_sub_n)
+END (BP_SYM (__mpn_sub_n))
diff --git a/sysdeps/powerpc/submul_1.S b/sysdeps/powerpc/submul_1.S
index e8c80afdaf..4bf4a078c2 100644
--- a/sysdeps/powerpc/submul_1.S
+++ b/sysdeps/powerpc/submul_1.S
@@ -18,12 +18,19 @@
    Boston, MA 02111-1307, USA.  */
 
 #include <sysdep.h>
+#include <bp-sym.h>
+#include <bp-asm.h>
 
 /* mp_limb_t mpn_submul_1 (mp_ptr res_ptr, mp_srcptr s1_ptr,
                            mp_size_t s1_size, mp_limb_t s2_limb)
    Calculate res-s1*s2 and put result back in res; return carry.  */
 
-ENTRY(__mpn_submul_1)
+ENTRY (BP_SYM (__mpn_submul_1))
+#if __BOUNDED_POINTERS__
+	slwi r10,r5,2		/* convert limbs to bytes */
+	CHECK_BOUNDS_BOTH_WIDE (r3, r8, r9, r10)
+	CHECK_BOUNDS_BOTH_WIDE (r4, r8, r9, r10)
+#endif
 	mtctr	r5
 
 	lwz	r0,0(r4)
@@ -49,4 +56,4 @@ L(0):	lwzu	r0,4(r4)
 L(1):	stw	r8,4(r3)
 	addze	r3,r10
 	blr
-END(__mpn_submul_1)
+END (BP_SYM (__mpn_submul_1))