Diffstat (limited to 'sysdeps/x86_64/multiarch/memrchr-sse2.S')
-rw-r--r-- | sysdeps/x86_64/multiarch/memrchr-sse2.S | 2
1 file changed, 1 insertion, 1 deletion
diff --git a/sysdeps/x86_64/multiarch/memrchr-sse2.S b/sysdeps/x86_64/multiarch/memrchr-sse2.S
index 8fdad16346..0ac707bc14 100644
--- a/sysdeps/x86_64/multiarch/memrchr-sse2.S
+++ b/sysdeps/x86_64/multiarch/memrchr-sse2.S
@@ -50,7 +50,7 @@ ENTRY_P2ALIGN(MEMRCHR, 6)
 	jz	L(page_cross)
 
 	/* NB: This load happens regardless of whether rdx (len) is zero. Since
-	   it doesn't cross a page and the standard gurantees any pointer have
+	   it doesn't cross a page and the standard guarantees any pointer have
 	   at least one-valid byte this load must be safe.  For the entire
 	   history of the x86 memrchr implementation this has been possible so
 	   no code "should" be relying on a zero-length check before this load.
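The comment touched by this hunk rests on a page-cross argument: the VEC-sized load is issued even when len is zero, and it cannot fault because it stays inside a single page and a valid pointer implies at least one valid (and therefore mapped) byte on that page. The C sketch below is an illustration of that geometric check only, not the glibc implementation; the PAGE_SIZE and VEC_SIZE constants and the load_crosses_page helper are assumptions made here for clarity. In the assembly, the `jz L(page_cross)` context line above is the branch taken when the corresponding check fails.

/* Illustrative sketch (not glibc code) of the page-cross reasoning in
   the comment above.  The SSE2 memrchr loads the VEC_SIZE bytes ending
   at the end pointer; that load is safe whenever it does not straddle
   a page boundary, even for a zero-length buffer.  */
#include <stdbool.h>
#include <stdint.h>

#define PAGE_SIZE 4096   /* assumed x86-64 page size */
#define VEC_SIZE  16     /* width of one SSE2 register in bytes */

static bool
load_crosses_page (const void *end)
{
  /* The load covers [end - VEC_SIZE, end).  It straddles two pages
     exactly when the page offset of `end` is nonzero but smaller than
     VEC_SIZE; an offset of 0 means the load ends flush against the
     page boundary and still sits in one page.  */
  uintptr_t off = (uintptr_t) end & (PAGE_SIZE - 1);
  return off != 0 && off < VEC_SIZE;
}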