Diffstat (limited to 'sysdeps/x86_64/multiarch/memrchr-avx2.S')
-rw-r--r--	sysdeps/x86_64/multiarch/memrchr-avx2.S	4
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/sysdeps/x86_64/multiarch/memrchr-avx2.S b/sysdeps/x86_64/multiarch/memrchr-avx2.S
index 15c83f6a2b..409706fd8e 100644
--- a/sysdeps/x86_64/multiarch/memrchr-avx2.S
+++ b/sysdeps/x86_64/multiarch/memrchr-avx2.S
@@ -65,7 +65,7 @@ ENTRY_P2ALIGN(MEMRCHR, 6)
 
 L(ret_vec_x0_test):
 	/* If ecx is zero (no matches) lzcnt will set it 32 (VEC_SIZE) which
-	   will gurantee edx (len) is less than it.  */
+	   will guarantee edx (len) is less than it.  */
 	lzcntl	%ecx, %ecx
 
 	/* Hoist vzeroupper (not great for RTM) to save code size. This allows
@@ -233,7 +233,7 @@ L(more_4x_vec):
 	jnz	L(ret_vec_x3)
 
 	/* Check if near end before re-aligning (otherwise might do an
-	   unnecissary loop iteration).  */
+	   unnecessary loop iteration).  */
 	addq	$-(VEC_SIZE * 4), %rax
 	cmpq	$(VEC_SIZE * 4), %rdx
 	jbe	L(last_4x_vec)
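
The comment corrected in the first hunk relies on a hardware property of lzcnt: with a zero source operand it returns the operand width, 32 for a 32-bit register (VEC_SIZE here), so the length in edx is guaranteed to compare below the result, as the comment states. Below is a minimal standalone sketch of that property; it is not part of the patch, the helper name lzcnt32 and the file name are hypothetical, and it assumes an x86-64 CPU with LZCNT support (on older CPUs the same encoding silently executes as BSR).

#include <stdio.h>

/* Hypothetical demo, not from the patch: lzcnt of a zero 32-bit source
   yields 32 (the operand width, VEC_SIZE in memrchr-avx2.S).  Requires an
   x86-64 CPU with LZCNT; build with e.g. `gcc -O2 lzcnt-demo.c`.  */

static unsigned int
lzcnt32 (unsigned int x)
{
  unsigned int r;
  __asm__ ("lzcntl %1, %0" : "=r" (r) : "r" (x));
  return r;
}

int
main (void)
{
  printf ("lzcnt (0)          = %u\n", lzcnt32 (0));          /* 32: no bits set.  */
  printf ("lzcnt (1)          = %u\n", lzcnt32 (1));          /* 31 leading zeros.  */
  printf ("lzcnt (0x80000000) = %u\n", lzcnt32 (0x80000000)); /* 0: top bit set.  */
  return 0;
}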