about summary refs log tree commit diff
path: root/REORG.TODO/sysdeps/i386/i586/memset.S
diff options
context:
space:
mode:
author	Zack Weinberg <zackw@panix.com>	2017-06-08 15:39:03 -0400
committer	Zack Weinberg <zackw@panix.com>	2017-06-08 15:39:03 -0400
commit5046dbb4a7eba5eccfd258f92f4735c9ffc8d069 (patch)
tree4470480d904b65cf14ca524f96f79eca818c3eaf /REORG.TODO/sysdeps/i386/i586/memset.S
parent199fc19d3aaaf57944ef036e15904febe877fc93 (diff)
downloadglibc-zack/build-layout-experiment.tar.gz
glibc-zack/build-layout-experiment.tar.xz
glibc-zack/build-layout-experiment.zip
Prepare for radical source tree reorganization. zack/build-layout-experiment
All top-level files and directories are moved into a temporary storage
directory, REORG.TODO, except for files that will certainly still
exist in their current form at top level when we're done (COPYING,
COPYING.LIB, LICENSES, NEWS, README), all old ChangeLog files (which
are moved to the new directory OldChangeLogs, instead), and the
generated file INSTALL (which is just deleted; in the new order, there
will be no generated files checked into version control).
Diffstat (limited to 'REORG.TODO/sysdeps/i386/i586/memset.S')
-rw-r--r--	REORG.TODO/sysdeps/i386/i586/memset.S	121
1 file changed, 121 insertions, 0 deletions
diff --git a/REORG.TODO/sysdeps/i386/i586/memset.S b/REORG.TODO/sysdeps/i386/i586/memset.S
new file mode 100644
index 0000000000..4f8f1bcf94
--- /dev/null
+++ b/REORG.TODO/sysdeps/i386/i586/memset.S
@@ -0,0 +1,121 @@
+/* memset/bzero -- set memory area to CH/0
+   Highly optimized version for ix86, x>=5.
+   Copyright (C) 1996-2017 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+   Contributed by Torbjorn Granlund, <tege@matematik.su.se>
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, see
+   <http://www.gnu.org/licenses/>.  */
+
+#include <sysdep.h>
+#include "asm-syntax.h"
+
+/* Stack offsets of the incoming arguments, measured from %esp AFTER
+   the "pushl %edi" in the prologue: 4 bytes for the return address
+   plus 4 bytes for the one saved register.  */
+#define PARMS	4+4	/* space for 1 saved reg */
+#define RTN	PARMS
+#define DEST	RTN		/* arg 1: destination pointer */
+#ifdef USE_AS_BZERO
+# define LEN	DEST+4		/* bzero(dest, len): no fill-char argument */
+#else
+# define CHR	DEST+4		/* arg 2: fill character (low byte used) */
+# define LEN	CHR+4		/* arg 3: byte count */
+#endif
+
+        .text
+#if defined SHARED && IS_IN (libc) && !defined USE_AS_BZERO
+/* void *__memset_chk (void *dest, int c, size_t len, size_t destlen)
+   Fortified (_FORTIFY_SOURCE) entry point.  Offsets are relative to
+   %esp at function entry (no register has been pushed yet): the buffer
+   size destlen is at 16(%esp) and the requested length at 12(%esp).
+   If destlen < len the write would overflow, so abort via __chk_fail;
+   otherwise execution falls straight through into memset below.  */
+ENTRY (__memset_chk)
+	movl	12(%esp), %eax		/* %eax = len */
+	cmpl	%eax, 16(%esp)		/* destlen < len => overflow */
+	jb	HIDDEN_JUMPTARGET (__chk_fail)
+END (__memset_chk)
+#endif
+/* void *memset (void *dest, int c, size_t len)   (or bzero(dest, len))
+   i386 stack-args ABI; returns dest in %eax (memset only).
+   Strategy: for runs of >= 36 bytes, align %edi to a 4-byte boundary,
+   then store 32 bytes per iteration of an unrolled loop, touching each
+   destination cache line with a read first so the Pentium allocates it
+   before the stores.  Short runs go straight to rep stosl/stosb.
+   Register roles: %edi = dest cursor, %edx = bytes remaining (later
+   biased by -32 as the loop counter), %eax = fill byte replicated into
+   all four byte lanes, %ecx = rep counts / scratch.  */
+ENTRY (memset)
+
+	pushl	%edi
+	cfi_adjust_cfa_offset (4)
+
+	movl	DEST(%esp), %edi
+	cfi_rel_offset (edi, 0)
+	movl	LEN(%esp), %edx
+#ifdef USE_AS_BZERO
+	xorl	%eax, %eax	/* we fill with 0 */
+#else
+	/* Replicate the low fill byte into all four bytes of %eax:
+	   al -> ah gives 0x__CCCC in %ax, then the shifted copy in the
+	   high half plus the saved %cx in the low half yields 0xCCCCCCCC.  */
+	movb	CHR(%esp), %al
+	movb	%al, %ah
+	movl	%eax, %ecx
+	shll	$16, %eax
+	movw	%cx, %ax
+#endif
+	cld			/* string ops ascend through memory */
+
+/* If less than 36 bytes to write, skip tricky code (it wouldn't work).  */
+	cmpl	$36, %edx
+	movl	%edx, %ecx	/* needed when branch is taken! */
+	jl	L(2)
+
+/* First write 0-3 bytes to make the pointer 32-bit aligned.  */
+	movl	%edi, %ecx	/* Copy ptr to ecx... */
+	negl	%ecx		/* ...and negate that and... */
+	andl	$3, %ecx	/* ...mask to get byte count.  */
+	subl	%ecx, %edx	/* adjust global byte count */
+	rep
+	stosb
+
+	subl	$32, %edx	/* offset count for unrolled loop */
+	movl	(%edi), %ecx	/* Fetch destination cache line */
+
+	.align	2, 0x90		/* supply 0x90 for broken assemblers */
+/* Main loop: invariant here is %edx = bytes remaining - 32, so the
+   loop continues while at least 32 bytes are still to be written
+   (jge tests the sign flag set by the subl below).  The dummy load of
+   28(%edi) pre-touches the line so the eight stores hit the cache.  */
+L(1):	movl	28(%edi), %ecx	/* allocate cache line for destination */
+	subl	$32, %edx	/* decr loop count */
+	movl	%eax, 0(%edi)	/* store words pairwise */
+	movl	%eax, 4(%edi)
+	movl	%eax, 8(%edi)
+	movl	%eax, 12(%edi)
+	movl	%eax, 16(%edi)
+	movl	%eax, 20(%edi)
+	movl	%eax, 24(%edi)
+	movl	%eax, 28(%edi)
+	leal	32(%edi), %edi	/* update destination pointer */
+	jge	L(1)
+
+	leal	32(%edx), %ecx	/* reset offset count */
+
+/* Write last 0-7 full 32-bit words (up to 8 words if loop was skipped).  */
+L(2):	shrl	$2, %ecx	/* convert byte count to longword count */
+	rep
+	stosl
+
+/* Finally write the last 0-3 bytes.  */
+	movl	%edx, %ecx	/* low 2 bits of %edx = leftover bytes */
+	andl	$3, %ecx
+	rep
+	stosb
+
+#ifndef USE_AS_BZERO
+	/* Load result (only if used as memset).  */
+	movl DEST(%esp), %eax	/* start address of destination is result */
+#endif
+	popl	%edi
+	cfi_adjust_cfa_offset (-4)
+	cfi_restore (edi)
+
+	ret
+END (memset)
+/* Intra-libc callers bind directly to this definition, bypassing the PLT.  */
+libc_hidden_builtin_def (memset)
+
+/* Emit a link-time warning when a program calls memset with a constant
+   length of zero (detected by the fortify headers, which redirect such
+   calls to __memset_zero_constant_len_parameter) -- usually a sign of
+   transposed value/length arguments.  The .gnu.warning.SYMBOL section
+   makes the linker print the string below whenever SYMBOL is referenced.
+   NOTE(review): the "!defined __memset_chk" test guards against an
+   object-like macro redefinition of that name; presumably historical --
+   confirm against the fortify headers.  */
+#if defined SHARED && IS_IN (libc) && !defined __memset_chk \
+    && !defined USE_AS_BZERO
+strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
+	.section .gnu.warning.__memset_zero_constant_len_parameter
+	.string "memset used with constant zero length parameter; this could be due to transposed parameters"
+#endif