diff options
Diffstat (limited to 'REORG.TODO/sysdeps/i386/i586/strcpy.S')
-rw-r--r-- | REORG.TODO/sysdeps/i386/i586/strcpy.S | 169 |
1 file changed, 169 insertions, 0 deletions
diff --git a/REORG.TODO/sysdeps/i386/i586/strcpy.S b/REORG.TODO/sysdeps/i386/i586/strcpy.S new file mode 100644 index 0000000000..a444604f4f --- /dev/null +++ b/REORG.TODO/sysdeps/i386/i586/strcpy.S @@ -0,0 +1,169 @@ +/* strcpy/stpcpy implementation for i586.
   Copyright (C) 1997-2017 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@cygnus.com>, 1997.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include "asm-syntax.h"

/* Argument offsets relative to %esp after the three register pushes
   in the prologue: 4 bytes of return address plus 12 bytes of saved
   registers sit below the arguments, hence PARMS = 4+12.  */
#define PARMS	4+12	/* space for 3 saved regs */
#define RTN	PARMS
#define DEST	RTN
#define SRC	DEST+4

#ifndef USE_AS_STPCPY
# define STRCPY strcpy
#endif

/* "Magic" constant for word-at-a-time NUL detection: combining a
   32-bit source word with 0xfefefeff via the add/sub/xor/and sequence
   in the main loop below reveals whether any of the word's four bytes
   is zero, so whole aligned words can be copied per iteration.  */
#define magic 0xfefefeff

	.text
/* char *strcpy (char *dest, const char *src)
   (or stpcpy when USE_AS_STPCPY is defined).
   Register roles in the body:
     %edi = dest cursor, %esi = src cursor, %ebx = magic constant,
     %eax/%ecx/%edx = scratch for the NUL-byte detection.  */
ENTRY (STRCPY)

	pushl	%edi		/* Save callee-saved registers.  */
	cfi_adjust_cfa_offset (4)
	pushl	%esi
	cfi_adjust_cfa_offset (4)
	pushl	%ebx
	cfi_adjust_cfa_offset (4)

	movl	DEST(%esp), %edi
	cfi_rel_offset (edi, 8)
	movl	SRC(%esp), %esi
	cfi_rel_offset (esi, 4)

	xorl	%eax, %eax	/* %al must be zero for the byte-copy
				   prologue's "orb (%esi), %al".  */
	leal	-1(%esi), %ecx

	movl	$magic, %ebx
	cfi_rel_offset (ebx, 0)
	andl	$3, %ecx	/* %ecx = (src - 1) & 3: index into the
				   unrolled byte-copy entries so that
				   %esi is 4-byte aligned when the word
				   loop L(1) is reached.  */

#ifdef PIC
	/* PIC cannot use an absolute address for label 1 below;
	   recover the current address from the popped return
	   address instead.  */
	call	2f
	cfi_adjust_cfa_offset (4)
2:	popl	%edx
	cfi_adjust_cfa_offset (-4)
	/* 0xb is the distance between 2: and 1: but we avoid writing
	   1f-2b because the assembler generates worse code.  */
	leal	0xb(%edx,%ecx,8), %ecx
#else
	leal	1f(,%ecx,8), %ecx
#endif

	/* Computed jump into the unrolled entries below; each entry
	   is 8 bytes long (hence the *8 scaling above).  */
	jmp	*%ecx

	.align 8
1:
	/* Alignment prologue: copy up to three single bytes, leaving
	   through L(end) if the terminating NUL shows up early.
	   %al is zero before each orb, so orb loads the byte.  */
	orb	(%esi), %al
	jz	L(end)
	stosb
	xorl	%eax, %eax
	incl	%esi

	orb	(%esi), %al
	jz	L(end)
	stosb
	xorl	%eax, %eax
	incl	%esi

	orb	(%esi), %al
	jz	L(end)
	stosb
	xorl	%eax, %eax
	incl	%esi

	/* Main loop: read one aligned 32-bit word per iteration.  The
	   arithmetic with the magic constant sets up %eax so that the
	   subsequent xor/and can prove the word contains no zero byte;
	   only when a NUL is possible do we fall out to the byte-wise
	   tail at L(3)/L(4).  */
L(1):	movl	(%esi), %ecx
	leal	4(%esi),%esi

	subl	%ecx, %eax
	addl	%ebx, %ecx

	decl	%eax
	jnc	L(3)

	movl	%ecx, %edx
	xorl	%ecx, %eax

	subl	%ebx, %edx	/* Undo the magic add: %edx = source word.  */
	andl	$~magic, %eax

	jne	L(4)

	/* No NUL byte in this word: store all four bytes and loop.  */
	movl	%edx, (%edi)
	leal	4(%edi),%edi

	jmp	L(1)

L(3):	movl	%ecx, %edx

	subl	%ebx, %edx	/* Undo the magic add here as well.  */

	/* Byte-wise tail: %edx may contain the NUL terminator; emit
	   its bytes one at a time until the NUL is found.  For stpcpy
	   %edi is advanced along the way so it ends up addressing the
	   terminating NUL byte.  */
L(4):	movb	%dl, (%edi)
	testb	%dl, %dl

	movl	%edx, %eax
	jz	L(end2)

	shrl	$16, %eax	/* %al/%ah = bytes 2 and 3 of the word.  */
	movb	%dh, 1(%edi)
#ifdef USE_AS_STPCPY
	addl	$1, %edi
#endif

	cmpb	$0, %dh
	jz	L(end2)

#ifdef USE_AS_STPCPY
	movb	%al, 1(%edi)
	addl	$1, %edi

	cmpb	$0, %al
	jz	L(end2)

	addl	$1, %edi
#else
	movb	%al, 2(%edi)
	testb	%al, %al

	leal	3(%edi), %edi
	jz	L(end2)
#endif

L(end):	movb	%ah, (%edi)	/* Store the terminating NUL (%ah is the
				   zero byte on every path reaching here).  */

L(end2):
#ifdef USE_AS_STPCPY
	movl	%edi, %eax	/* stpcpy: return pointer to the NUL.  */
#else
	movl	DEST(%esp), %eax /* strcpy: return the original dest.  */
#endif
	popl	%ebx		/* Restore callee-saved registers.  */
	cfi_adjust_cfa_offset (-4)
	cfi_restore (ebx)
	popl	%esi
	cfi_adjust_cfa_offset (-4)
	cfi_restore (esi)
	popl	%edi
	cfi_adjust_cfa_offset (-4)
	cfi_restore (edi)

	ret
END (STRCPY)
#ifndef USE_AS_STPCPY
libc_hidden_builtin_def (strcpy)
#endif |