path: root/sysdeps/x86_64/multiarch/strcmp.S
/* Multiple versions of strcmp
   Copyright (C) 2009-2014 Free Software Foundation, Inc.
   Contributed by Intel Corporation.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <sysdep.h>
#include <init-arch.h>

#ifdef USE_AS_STRNCMP
/* Since the counter, %r11, is unsigned, we branch to strcmp_exitz
   if computing the new counter wrapped around (new counter > old
   counter) or if it is 0.  */
# define UPDATE_STRNCMP_COUNTER				\
	/* Calculate the number of bytes left to compare.  */	\
	lea	-16(%rcx, %r11), %r9;			\
	cmp	%r9, %r11;				\
	jb	LABEL(strcmp_exitz);			\
	test	%r9, %r9;				\
	je	LABEL(strcmp_exitz);			\
	mov	%r9, %r11
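
/* Illustrative C sketch of the macro above (not part of the build).
   At its call sites in strcmp-sse42.S, %r11 holds the remaining
   maximum number of bytes to compare and %rcx the source offset
   within its 16-byte block, so the first aligned compare just covered
   16 - %rcx bytes; this reading of %rcx is taken from the call sites,
   not from anything in this file.

	remaining = count + offset - 16;	(lea -16(%rcx, %r11), %r9)
	if (count < remaining)			(jb: the subtraction wrapped)
		goto strcmp_exitz;
	if (remaining == 0)			(je)
		goto strcmp_exitz;
	count = remaining;			(mov %r9, %r11)
 */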

# define STRCMP_SSE42	__strncmp_sse42
# define STRCMP_SSSE3	__strncmp_ssse3
# define STRCMP_SSE2	__strncmp_sse2
# define __GI_STRCMP	__GI_strncmp
#elif defined USE_AS_STRCASECMP_L
# include "locale-defines.h"

# define UPDATE_STRNCMP_COUNTER

# define STRCMP_AVX	__strcasecmp_l_avx
# define STRCMP_SSE42	__strcasecmp_l_sse42
# define STRCMP_SSSE3	__strcasecmp_l_ssse3
# define STRCMP_SSE2	__strcasecmp_l_sse2
# define __GI_STRCMP	__GI___strcasecmp_l
#elif defined USE_AS_STRNCASECMP_L
# include "locale-defines.h"

/* Since the counter, %r11, is unsigned, we branch to strcmp_exitz
   if computing the new counter wrapped around (new counter > old
   counter) or if it is 0.  */
# define UPDATE_STRNCMP_COUNTER				\
	/* Calculate the number of bytes left to compare.  */	\
	lea	-16(%rcx, %r11), %r9;			\
	cmp	%r9, %r11;				\
	jb	LABEL(strcmp_exitz);			\
	test	%r9, %r9;				\
	je	LABEL(strcmp_exitz);			\
	mov	%r9, %r11

# define STRCMP_AVX	__strncasecmp_l_avx
# define STRCMP_SSE42	__strncasecmp_l_sse42
# define STRCMP_SSSE3	__strncasecmp_l_ssse3
# define STRCMP_SSE2	__strncasecmp_l_sse2
# define __GI_STRCMP	__GI___strncasecmp_l
#else
# define USE_AS_STRCMP
# define UPDATE_STRNCMP_COUNTER
# ifndef STRCMP
#  define STRCMP	strcmp
#  define STRCMP_SSE42	__strcmp_sse42
#  define STRCMP_SSSE3	__strcmp_ssse3
#  define STRCMP_SSE2	__strcmp_sse2
#  define __GI_STRCMP	__GI_strcmp
# endif
#endif
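
/* This one source builds the strcmp, strncmp, strcasecmp_l and
   strncasecmp_l dispatchers: each sibling wrapper file defines the
   matching USE_AS_* macro and STRCMP name before including this file,
   along the lines of (illustrative; see the sibling files for the
   exact contents):

	#define STRCMP strncmp
	#define USE_AS_STRNCMP
	#include "strcmp.S"
 */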

/* Define multiple versions only for the definition in libc.  Don't
   define multiple versions for strncmp in the static library, since we
   need strncmp before the initialization has happened.  */
#if (defined SHARED || !defined USE_AS_STRNCMP) && IS_IN (libc)
	.text
ENTRY(STRCMP)
	.type	STRCMP, @gnu_indirect_function
	/* Manually inlined call to __get_cpu_features.  */
	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
	jne	1f
	call	__init_cpu_features
1:
#ifdef USE_AS_STRCMP
	leaq	__strcmp_sse2_unaligned(%rip), %rax
	testl   $bit_Fast_Unaligned_Load, __cpu_features+FEATURE_OFFSET+index_Fast_Unaligned_Load(%rip)
	jnz     3f
#else
	testl	$bit_Slow_SSE4_2, __cpu_features+FEATURE_OFFSET+index_Slow_SSE4_2(%rip)
	jnz	2f
	leaq	STRCMP_SSE42(%rip), %rax
	testl	$bit_SSE4_2, __cpu_features+CPUID_OFFSET+index_SSE4_2(%rip)
	jnz	3f
#endif
2:	leaq	STRCMP_SSSE3(%rip), %rax
	testl	$bit_SSSE3, __cpu_features+CPUID_OFFSET+index_SSSE3(%rip)
	jnz	3f
	leaq	STRCMP_SSE2(%rip), %rax
3:	ret
END(STRCMP)
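
/* Selection order of the resolver above, as an illustrative C sketch
   (the names mirror the feature bits tested above):

	if (USE_AS_STRCMP) {
		if (Fast_Unaligned_Load)
			return __strcmp_sse2_unaligned;
	} else {
		if (SSE4_2 && !Slow_SSE4_2)
			return STRCMP_SSE42;
	}
	if (SSSE3)
		return STRCMP_SSSE3;
	return STRCMP_SSE2;
 */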

# ifdef USE_AS_STRCASECMP_L
ENTRY(__strcasecmp)
	.type	__strcasecmp, @gnu_indirect_function
	/* Manually inlined call to __get_cpu_features.  */
	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
	jne	1f
	call	__init_cpu_features
1:
#  ifdef HAVE_AVX_SUPPORT
	leaq	__strcasecmp_avx(%rip), %rax
	testl	$bit_AVX_Usable, __cpu_features+FEATURE_OFFSET+index_AVX_Usable(%rip)
	jnz	3f
#  endif
	testl	$bit_Slow_SSE4_2, __cpu_features+FEATURE_OFFSET+index_Slow_SSE4_2(%rip)
	jnz	2f
	leaq	__strcasecmp_sse42(%rip), %rax
	testl	$bit_SSE4_2, __cpu_features+CPUID_OFFSET+index_SSE4_2(%rip)
	jnz	3f
2:	leaq	__strcasecmp_ssse3(%rip), %rax
	testl	$bit_SSSE3, __cpu_features+CPUID_OFFSET+index_SSSE3(%rip)
	jnz	3f
	leaq	__strcasecmp_sse2(%rip), %rax
3:	ret
END(__strcasecmp)
weak_alias (__strcasecmp, strcasecmp)
# endif
# ifdef USE_AS_STRNCASECMP_L
ENTRY(__strncasecmp)
	.type	__strncasecmp, @gnu_indirect_function
	/* Manually inlined call to __get_cpu_features.  */
	cmpl	$0, __cpu_features+KIND_OFFSET(%rip)
	jne	1f
	call	__init_cpu_features
1:
#  ifdef HAVE_AVX_SUPPORT
	leaq	__strncasecmp_avx(%rip), %rax
	testl	$bit_AVX_Usable, __cpu_features+FEATURE_OFFSET+index_AVX_Usable(%rip)
	jnz	3f
#  endif
	testl	$bit_Slow_SSE4_2, __cpu_features+FEATURE_OFFSET+index_Slow_SSE4_2(%rip)
	jnz	2f
	leaq	__strncasecmp_sse42(%rip), %rax
	testl	$bit_SSE4_2, __cpu_features+CPUID_OFFSET+index_SSE4_2(%rip)
	jnz	3f
2:	leaq	__strncasecmp_ssse3(%rip), %rax
	testl	$bit_SSSE3, __cpu_features+CPUID_OFFSET+index_SSSE3(%rip)
	jnz	3f
	leaq	__strncasecmp_sse2(%rip), %rax
3:	ret
END(__strncasecmp)
weak_alias (__strncasecmp, strncasecmp)
# endif
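
/* The two case-insensitive resolvers above follow the same order as
   the STRCMP resolver, but additionally prefer the AVX variant when
   the build has AVX support and the AVX_Usable feature bit is set.  */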

# undef LABEL
# define LABEL(l) .L##l##_sse42
# define GLABEL(l) l##_sse42
# define SECTION sse4.2
# include "strcmp-sse42.S"
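
/* strcmp-sse42.S supplies the actual comparison loops.  LABEL, GLABEL
   and SECTION are (re)defined before each inclusion so the same body
   can be assembled more than once with distinct local labels, symbol
   suffixes and section names (_sse42 here, _avx below).  */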


# ifdef HAVE_AVX_SUPPORT
#  if defined USE_AS_STRCASECMP_L || defined USE_AS_STRNCASECMP_L
#   define LABEL(l) .L##l##_avx
#   define GLABEL(l) l##_avx
#   define USE_AVX 1
#   undef STRCMP_SSE42
#   define STRCMP_SSE42 STRCMP_AVX
#   define SECTION avx
#   include "strcmp-sse42.S"
#  endif
# endif
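
/* For the case-insensitive functions the same body is included a
   second time with _avx names; STRCMP_SSE42 is redirected to
   STRCMP_AVX so the included file emits the AVX-suffixed entry
   points.  */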


# undef ENTRY
# define ENTRY(name) \
	.type STRCMP_SSE2, @function; \
	.align 16; \
	.globl STRCMP_SSE2; \
	.hidden STRCMP_SSE2; \
	STRCMP_SSE2: cfi_startproc; \
	CALL_MCOUNT
# undef END
# define END(name) \
	cfi_endproc; .size STRCMP_SSE2, .-STRCMP_SSE2
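
/* With ENTRY/END redefined this way, the generic implementation
   included at the bottom of this file is emitted as the local, hidden
   STRCMP_SSE2 function instead of under the public STRCMP name, which
   is reserved for the IFUNC above.  */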

# ifdef USE_AS_STRCASECMP_L
#  define ENTRY2(name) \
	.type __strcasecmp_sse2, @function; \
	.align 16; \
	.globl __strcasecmp_sse2; \
	.hidden __strcasecmp_sse2; \
	__strcasecmp_sse2: cfi_startproc; \
	CALL_MCOUNT
#  define END2(name) \
	cfi_endproc; .size __strcasecmp_sse2, .-__strcasecmp_sse2
# endif

# ifdef USE_AS_STRNCASECMP_L
#  define ENTRY2(name) \
	.type __strncasecmp_sse2, @function; \
	.align 16; \
	.globl __strncasecmp_sse2; \
	.hidden __strncasecmp_sse2; \
	__strncasecmp_sse2: cfi_startproc; \
	CALL_MCOUNT
#  define END2(name) \
	cfi_endproc; .size __strncasecmp_sse2, .-__strncasecmp_sse2
# endif
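
/* ENTRY2/END2 are expected to be used by the file included at the
   bottom for its extra non-_l entry point (__strcasecmp resp.
   __strncasecmp); with these definitions that entry comes out as the
   hidden *_sse2 variant the resolvers above fall back to.  */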

# undef libc_hidden_builtin_def
/* It doesn't make sense to send libc-internal strcmp calls through a PLT.
   The speedup we get from using SSE4.2 instructions is likely eaten away
   by the indirect call in the PLT.  */
# define libc_hidden_builtin_def(name) \
	.globl __GI_STRCMP; __GI_STRCMP = STRCMP_SSE2
#endif
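
/* The unconditional include below supplies the baseline SSE2
   implementation: in the multiarch libc build it becomes STRCMP_SSE2
   through the macros redefined above, while in the remaining
   configurations (e.g. strncmp in the static library) it defines
   STRCMP directly.  */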

#include "../strcmp.S"