 ChangeLog                               |  5 +++++
 sysdeps/x86_64/multiarch/memcpy-ssse3.S | 24 ++++++++++++------------
 2 files changed, 17 insertions(+), 12 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 8946eb2f1b..7891285462 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,10 @@
 2012-05-15  H.J. Lu  <hongjiu.lu@intel.com>
 
+	* sysdeps/x86_64/multiarch/memcpy-ssse3.S: Load cache sizes
+	into R*_LP.
+
+2012-05-15  H.J. Lu  <hongjiu.lu@intel.com>
+
 	* sysdeps/x86_64/multiarch/memcpy-ssse3-back.S: Load cache sizes
 	into R*_LP.
diff --git a/sysdeps/x86_64/multiarch/memcpy-ssse3.S b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
index b71ac33df7..3f7d542584 100644
--- a/sysdeps/x86_64/multiarch/memcpy-ssse3.S
+++ b/sysdeps/x86_64/multiarch/memcpy-ssse3.S
@@ -97,9 +97,9 @@ L(80bytesormore):
 	sub	%rcx, %rsi
 
 #ifdef SHARED_CACHE_SIZE_HALF
-	mov	$SHARED_CACHE_SIZE_HALF, %rcx
+	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %rcx
+	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
 #endif
 	cmp	%rcx, %rdx
 	mov	%rsi, %r9
@@ -107,9 +107,9 @@ L(80bytesormore):
 	and	$0xf, %r9
 	jz	L(shl_0)
 #ifdef DATA_CACHE_SIZE_HALF
-	mov	$DATA_CACHE_SIZE_HALF, %rcx
+	mov	$DATA_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size_half(%rip), %rcx
+	mov	__x86_64_data_cache_size_half(%rip), %RCX_LP
 #endif
 	BRANCH_TO_JMPTBL_ENTRY (L(shl_table), %r9, 4)
 
@@ -127,9 +127,9 @@ L(copy_backward):
 	sub	%rcx, %rsi
 
 #ifdef SHARED_CACHE_SIZE_HALF
-	mov	$SHARED_CACHE_SIZE_HALF, %rcx
+	mov	$SHARED_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_shared_cache_size_half(%rip), %rcx
+	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP
 #endif
 
 	cmp	%rcx, %rdx
@@ -138,9 +138,9 @@ L(copy_backward):
 	and	$0xf, %r9
 	jz	L(shl_0_bwd)
 #ifdef DATA_CACHE_SIZE_HALF
-	mov	$DATA_CACHE_SIZE_HALF, %rcx
+	mov	$DATA_CACHE_SIZE_HALF, %RCX_LP
 #else
-	mov	__x86_64_data_cache_size_half(%rip), %rcx
+	mov	__x86_64_data_cache_size_half(%rip), %RCX_LP
 #endif
 	BRANCH_TO_JMPTBL_ENTRY (L(shl_table_bwd), %r9, 4)
 
@@ -175,9 +175,9 @@ L(shl_0_less_64bytes):
 	ALIGN (4)
 L(shl_0_gobble):
 #ifdef DATA_CACHE_SIZE_HALF
-	cmp	$DATA_CACHE_SIZE_HALF, %rdx
+	cmp	$DATA_CACHE_SIZE_HALF, %RDX_LP
 #else
-	cmp	__x86_64_data_cache_size_half(%rip), %rdx
+	cmp	__x86_64_data_cache_size_half(%rip), %RDX_LP
 #endif
 	lea	-128(%rdx), %rdx
 	jae	L(shl_0_gobble_mem_loop)
@@ -316,9 +316,9 @@ L(shl_0_less_64bytes_bwd):
 	ALIGN (4)
 L(shl_0_gobble_bwd):
 #ifdef DATA_CACHE_SIZE_HALF
-	cmp	$DATA_CACHE_SIZE_HALF, %rdx
+	cmp	$DATA_CACHE_SIZE_HALF, %RDX_LP
 #else
-	cmp	__x86_64_data_cache_size_half(%rip), %rdx
+	cmp	__x86_64_data_cache_size_half(%rip), %RDX_LP
 #endif
 	lea	-128(%rdx), %rdx
 	jae	L(shl_0_gobble_mem_bwd_loop)
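Background on the change: on x32 (ILP32 on x86-64), long and pointers are 32 bits, so variables such as __x86_64_shared_cache_size_half occupy only 4 bytes. A 64-bit load like "mov ...(%rip), %rcx" would read 8 bytes, pulling in 4 bytes of adjacent data. The R*_LP macros let the same source expand to the "long-pointer-sized" register for each ABI. The sketch below illustrates the convention; glibc keeps the real definitions in sysdeps/x86_64/sysdep.h and sysdeps/x86_64/x32/sysdep.h, and the exact spelling there may differ from this from-memory reproduction.

	/* Sketch of the R*_LP convention (assumed layout, not verbatim
	   from glibc).  .S files are run through the C preprocessor,
	   so plain #define works here.  */
	#ifdef __ILP32__
	/* x32: long is 32 bits, so the LP registers are the 32-bit halves.  */
	# define RCX_LP	ecx
	# define RDX_LP	edx
	#else
	/* LP64: long is 64 bits, so the LP registers are the full registers.  */
	# define RCX_LP	rcx
	# define RDX_LP	rdx
	#endif

	/* Usage, as in the patch above.  After preprocessing this is a
	   4-byte load on x32:
	       mov  __x86_64_shared_cache_size_half(%rip), %ecx
	   and an 8-byte load on LP64:
	       mov  __x86_64_shared_cache_size_half(%rip), %rcx  */
	mov	__x86_64_shared_cache_size_half(%rip), %RCX_LP

Note that the unchanged 64-bit "cmp %rcx, %rdx" that follows each load stays correct on x32 because a 32-bit mov to %ecx zero-extends into the full %rcx.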