summary refs log tree commit diff
path: root/sysdeps/x86_64
diff options
context:
space:
mode:
authorUlrich Drepper <drepper@redhat.com>2007-10-17 15:58:16 +0000
committerUlrich Drepper <drepper@redhat.com>2007-10-17 15:58:16 +0000
commit406f28dbe5f8fc70fed76a2fe0112983417ebf60 (patch)
treee4ffc39ec8b741af04385bfa056ba2f38e8678c7 /sysdeps/x86_64
parentac1cb5da083e3e9f5beb923610ea52b57f1325d2 (diff)
downloadglibc-406f28dbe5f8fc70fed76a2fe0112983417ebf60.tar.gz
glibc-406f28dbe5f8fc70fed76a2fe0112983417ebf60.tar.xz
glibc-406f28dbe5f8fc70fed76a2fe0112983417ebf60.zip
* sysdeps/x86_64/cacheinfo.c: Comment out code added in support of
	new memset.
	* sysdeps/x86_64/memset.S: Revert to the previous implementation; the
	costs of the new code are too high for the improvements.  Implement
	bzero unconditionally for use in libc.
Diffstat (limited to 'sysdeps/x86_64')
-rw-r--r--sysdeps/x86_64/cacheinfo.c8
-rw-r--r--sysdeps/x86_64/memset.S32
2 files changed, 20 insertions, 20 deletions
diff --git a/sysdeps/x86_64/cacheinfo.c b/sysdeps/x86_64/cacheinfo.c
index 114ec673d3..83e7b3c60b 100644
--- a/sysdeps/x86_64/cacheinfo.c
+++ b/sysdeps/x86_64/cacheinfo.c
@@ -404,10 +404,13 @@ long int __x86_64_data_cache_size_half attribute_hidden = 32 * 1024 / 2;
 /* Shared cache size for use in memory and string routines, typically
    L2 or L3 size.  */
 long int __x86_64_shared_cache_size_half attribute_hidden = 1024 * 1024 / 2;
+#ifdef NOT_USED_RIGHT_NOW
 long int __x86_64_shared_cache_size attribute_hidden = 1024 * 1024;
+#endif
 /* PREFETCHW support flag for use in memory and string routines.  */
 int __x86_64_prefetchw attribute_hidden;
 
+#ifdef NOT_USED_RIGHT_NOW
 /* Instructions preferred for memory and string routines.
 
   0: Regular instructions
@@ -417,6 +420,7 @@ int __x86_64_prefetchw attribute_hidden;
 
   */
 int __x86_64_preferred_memory_instruction attribute_hidden;
+#endif
 
 
 static void
@@ -459,12 +463,14 @@ init_cacheinfo (void)
 		    : "=a" (eax), "=b" (ebx), "=c" (ecx), "=d" (edx)
 		    : "0" (1));
 
+#ifdef NOT_USED_RIGHT_NOW
       /* Intel prefers SSSE3 instructions for memory/string rountines
 	 if they are avaiable.  */
       if ((ecx & 0x200))
 	__x86_64_preferred_memory_instruction = 3;
       else
 	__x86_64_preferred_memory_instruction = 2;
+#endif
 
       /* Figure out the number of logical threads that share the
 	 highest cache level.  */
@@ -570,6 +576,8 @@ init_cacheinfo (void)
   if (shared > 0)
     {
       __x86_64_shared_cache_size_half = shared / 2;
+#ifdef NOT_USED_RIGHT_NOW
       __x86_64_shared_cache_size = shared;
+#endif
     }
 }
diff --git a/sysdeps/x86_64/memset.S b/sysdeps/x86_64/memset.S
index 1c421c75f3..db39b09c89 100644
--- a/sysdeps/x86_64/memset.S
+++ b/sysdeps/x86_64/memset.S
@@ -1,6 +1,6 @@
 /* memset/bzero -- set memory area to CH/0
    Optimized version for x86-64.
-   Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+   Copyright (C) 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
    This file is part of the GNU C Library.
    Contributed by Andreas Jaeger <aj@suse.de>.
 
@@ -24,37 +24,35 @@
 #include "bp-sym.h"
 #include "bp-asm.h"
 
-/* BEWARE: `#ifdef memset' means that memset is redefined as `bzero' */
-#define BZERO_P (defined memset)
-
 /* This is somehow experimental and could made dependend on the cache
    size.  */
 #define LARGE $120000
 
         .text
-#if !BZERO_P && defined PIC && !defined NOT_IN_libc
+#ifndef NOT_IN_libc
+ENTRY(bzero)
+	mov	%rsi,%rdx	/* Adjust parameter.  */
+	xorl	%esi,%esi	/* Fill with 0s.  */
+	jmp	L(memset_entry)
+END(bzero)
+#endif
+
+#if defined PIC && !defined NOT_IN_libc
 ENTRY (__memset_chk)
 	cmpq	%rdx, %rcx
 	jb	HIDDEN_JUMPTARGET (__chk_fail)
 END (__memset_chk)
 #endif
 ENTRY (memset)
-#if BZERO_P
-	mov	%rsi,%rdx	/* Adjust parameter.  */
-	xorl	%esi,%esi	/* Fill with 0s.  */
-#endif
+L(memset_entry):
 	cmp	$0x7,%rdx	/* Check for small length.  */
 	mov	%rdi,%rcx	/* Save ptr as return value.  */
 	jbe	7f
 
-#if BZERO_P
-	mov	%rsi,%r8	/* Just copy 0.  */
-#else
 	/* Populate 8 bit data to full 64-bit.  */
 	movabs	$0x0101010101010101,%r8
 	movzbl	%sil,%eax
 	imul	%rax,%r8
-#endif
 	test	$0x7,%edi	/* Check for alignment.  */
 	je	2f
 
@@ -109,12 +107,8 @@ ENTRY (memset)
 	dec	%rdx
 	jne	8b
 9:
-#if BZERO_P
-	nop
-#else
 	/* Load result (only if used as memset).  */
 	mov	%rdi,%rax	/* start address of destination is result */
-#endif
 	retq
 
 	.p2align 4
@@ -135,11 +129,9 @@ ENTRY (memset)
 	jmp	4b
 
 END (memset)
-#if !BZERO_P
 libc_hidden_builtin_def (memset)
-#endif
 
-#if !BZERO_P && defined PIC && !defined NOT_IN_libc
+#if defined PIC && !defined NOT_IN_libc
 strong_alias (__memset_chk, __memset_zero_constant_len_parameter)
 	.section .gnu.warning.__memset_zero_constant_len_parameter
 	.string "memset used with constant zero length parameter; this could be due to transposed parameters"