/* Copy memory block and return pointer to beginning of destination block
   For Intel 80x86, x>=6.
   This file is part of the GNU C Library.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Ulrich Drepper <drepper@cygnus.com>, 2003.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
#include <sysdep.h>
#include "asm-syntax.h"

#define PARMS	4+4	/* one spilled register */
#define RTN	PARMS

	.text
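
/* Offsets of the arguments on the stack.  memmove takes (dest, src, len)
   while bcopy takes (src, dest, len), hence the two sets of macros below.  */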
#ifdef USE_AS_BCOPY
# define SRC	RTN
# define DEST	SRC+4
# define LEN	DEST+4
#else
# define DEST	RTN
# define SRC	DEST+4
# define LEN	SRC+4
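
/* void *__memmove_chk (void *dest, const void *src, size_t len,
   size_t destlen): jump to __chk_fail if the destination buffer
   (destlen, at 16(%esp)) is smaller than LEN (at 12(%esp)); otherwise
   fall through into memmove below.  */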
# if defined PIC && IS_IN (libc)
ENTRY_CHK (__memmove_chk)
	movl	12(%esp), %eax
	cmpl	%eax, 16(%esp)
	jb	HIDDEN_JUMPTARGET (__chk_fail)
END_CHK (__memmove_chk)
# endif
#endif
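
/* void *memmove (void *dest, const void *src, size_t len)
   Copy LEN bytes between possibly overlapping blocks; the copy direction
   is chosen so that the overlap is handled correctly.  Returns DEST
   (when built as bcopy, nothing is returned).  */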
ENTRY (memmove)

	pushl	%edi
	cfi_adjust_cfa_offset (4)

	movl	LEN(%esp), %ecx
	movl	DEST(%esp), %edi
	cfi_rel_offset (edi, 0)
	movl	%esi, %edx	/* Save the caller's %esi in call-clobbered %edx.  */
	movl	SRC(%esp), %esi
	cfi_register (esi, edx)
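
	/* If DEST - SRC, computed as an unsigned value, is smaller than LEN,
	   the destination starts inside the source block and a forward copy
	   would clobber source bytes before they are read; use the backward
	   path at 3f instead.  */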
	movl	%edi, %eax
	subl	%esi, %eax
	cmpl	%eax, %ecx
	ja	3f
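
	/* Forward copy: move a stray byte and then a stray word first, so the
	   bulk of the block can be copied four bytes at a time with rep movsl.  */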
	cld
	shrl	$1, %ecx
	jnc	1f
	movsb
1:	shrl	$1, %ecx
	jnc	2f
	movsw
2:	rep
	movsl
	movl	%edx, %esi	/* Restore the caller's %esi.  */
	cfi_restore (esi)
#ifndef USE_AS_BCOPY
	movl	DEST(%esp), %eax	/* Return value: DEST.  */
#endif

	popl	%edi
	cfi_adjust_cfa_offset (-4)
	cfi_restore (edi)
	ret

	/* Re-establish the CFI state for the backward path below: %edi is
	   still saved on the stack and %esi still lives in %edx there.  */
	cfi_adjust_cfa_offset (4)
	cfi_rel_offset (edi, 0)
	cfi_register (esi, edx)
	/* Backward copying.  */
3:	std
	leal	-1(%edi, %ecx), %edi
	leal	-1(%esi, %ecx), %esi
	shrl	$1, %ecx
	jnc	1f
	movsb
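	/* With the direction flag set, movs decrements the pointers after each
	   element, so before switching to a wider element size step the
	   pointers back to the lowest byte of the next element.  */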
1:	subl	$1, %edi
	subl	$1, %esi
	shrl	$1, %ecx
	jnc	2f
	movsw
2:	subl	$2, %edi
	subl	$2, %esi
	rep
	movsl
	movl	%edx, %esi	/* Restore the caller's %esi.  */
	cfi_restore (esi)
#ifndef USE_AS_BCOPY
	movl	DEST(%esp), %eax	/* Return value: DEST.  */
#endif

	cld	/* The ABI expects the direction flag to be clear on return.  */
	popl	%edi
	cfi_adjust_cfa_offset (-4)
	cfi_restore (edi)
	ret
END (memmove)
#ifndef USE_AS_BCOPY
libc_hidden_builtin_def (memmove)
#endif