-rw-r--r-- | sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S | 6
1 file changed, 3 insertions, 3 deletions
diff --git a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
index 5e4a071f16..a783da5de2 100644
--- a/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
+++ b/sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
@@ -420,8 +420,8 @@ L(more_2x_vec):
 	cmpq	$(VEC_SIZE * 8), %rdx
 	ja	L(more_8x_vec)
 	cmpq	$(VEC_SIZE * 4), %rdx
-	jb	L(last_4x_vec)
-	/* Copy from 4 * VEC to 8 * VEC, inclusively. */
+	jbe	L(last_4x_vec)
+	/* Copy from 4 * VEC + 1 to 8 * VEC, inclusively. */
 	VMOVU	(%rsi), %VEC(0)
 	VMOVU	VEC_SIZE(%rsi), %VEC(1)
 	VMOVU	(VEC_SIZE * 2)(%rsi), %VEC(2)
@@ -440,7 +440,7 @@ L(more_2x_vec):
 	VMOVU	%VEC(7), -(VEC_SIZE * 4)(%rdi,%rdx)
 	VZEROUPPER_RETURN
L(last_4x_vec):
-	/* Copy from 2 * VEC to 4 * VEC. */
+	/* Copy from 2 * VEC + 1 to 4 * VEC, inclusively. */
 	VMOVU	(%rsi), %VEC(0)
 	VMOVU	VEC_SIZE(%rsi), %VEC(1)
 	VMOVU	-VEC_SIZE(%rsi,%rdx), %VEC(2)
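For readers not fluent in the VEC macros, the C sketch below mirrors the dispatch that the patch adjusts. It is only an illustration under assumed names: VEC_SIZE, copy_4x_vec, copy_8x_vec and dispatch are hypothetical stand-ins for the assembly paths, not glibc's implementation. The point it shows is why changing jb to jbe matters: a length of exactly 4 * VEC now takes the four-load L(last_4x_vec) path instead of falling through to the eight-load path, and both paths stay correct for overlapping memmove because all loads are performed before any store.

/* Minimal sketch, assuming an AVX2-style VEC_SIZE of 32 bytes.
   copy_4x_vec, copy_8x_vec and dispatch are hypothetical names for
   illustration; they are not the glibc routines.  */
#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define VEC_SIZE 32

/* Analogue of L(last_4x_vec): 2*VEC+1 .. 4*VEC bytes, two loads from
   the start and two from the end, loads completed before stores.  */
static void copy_4x_vec(char *dst, const char *src, size_t len)
{
	char head[2 * VEC_SIZE], tail[2 * VEC_SIZE];
	memcpy(head, src, 2 * VEC_SIZE);
	memcpy(tail, src + len - 2 * VEC_SIZE, 2 * VEC_SIZE);
	memcpy(dst, head, 2 * VEC_SIZE);
	memcpy(dst + len - 2 * VEC_SIZE, tail, 2 * VEC_SIZE);
}

/* Analogue of the 8-vector path: 4*VEC+1 .. 8*VEC bytes, four loads
   from the start and four from the end.  */
static void copy_8x_vec(char *dst, const char *src, size_t len)
{
	char head[4 * VEC_SIZE], tail[4 * VEC_SIZE];
	memcpy(head, src, 4 * VEC_SIZE);
	memcpy(tail, src + len - 4 * VEC_SIZE, 4 * VEC_SIZE);
	memcpy(dst, head, 4 * VEC_SIZE);
	memcpy(dst + len - 4 * VEC_SIZE, tail, 4 * VEC_SIZE);
}

static void dispatch(char *dst, const char *src, size_t len)
{
	/* Before the patch the test was 'len < 4*VEC' (jb), so a length
	   of exactly 4*VEC fell through to the eight-load path even
	   though the four-load path already covers it.  'jbe' makes the
	   boundary inclusive.  */
	if (len <= 4 * VEC_SIZE)
		copy_4x_vec(dst, src, len);	/* 2*VEC+1 .. 4*VEC  */
	else
		copy_8x_vec(dst, src, len);	/* 4*VEC+1 .. 8*VEC  */
}

int main(void)
{
	char src[8 * VEC_SIZE], dst[8 * VEC_SIZE];
	for (size_t i = 0; i < sizeof src; i++)
		src[i] = (char) i;

	size_t len = 4 * VEC_SIZE;	/* the boundary case the patch changes  */
	dispatch(dst, src, len);
	printf("copied %zu bytes, last byte ok: %d\n",
	       len, dst[len - 1] == src[len - 1]);
	return 0;
}

The staging through head/tail buffers stands in for loading into %VEC registers before any store, which is what lets the real code handle overlapping source and destination without a separate backward-copy loop for these sizes.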