author    Richard Henderson <richard.henderson@linaro.org>  2023-01-10 18:01:02 -0300
committer Adhemerval Zanella <adhemerval.zanella@linaro.org>  2023-02-06 16:19:35 -0300
commit c62b1c29c2100f6d0bb8fe6d5e35811bd8ab49d9 (patch)
tree   0512bf2d4e936afdad81efd54c85a43260ce1073 /sysdeps/hppa/string-fzc.h
parent be836d9153eb6c1203c6659befb38427514eb349 (diff)
hppa: Add string-fza.h, string-fzc.h, and string-fzi.h
Use UXOR,SBZ to test for a zero byte within a word.  While we can
get semi-decent code out of asm-goto, we would do slightly better
with a compiler builtin.

For index_zero et al., sequential testing of bytes is less expensive than
any trick that involves a count-leading-zeros insn that we don't have
(see the portable sketch below).

Checked on hppa-linux-gnu.

Reviewed-by: Adhemerval Zanella  <adhemerval.zanella@linaro.org>
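
The sequential byte testing the message describes can be written in
portable C roughly as below.  This is an illustrative sketch only, not
part of the commit: the name index_first_zero_ref is ours, and it
assumes the 32-bit big-endian layout of hppa, where the first byte in
memory order is the most significant.

  #include <stdint.h>

  /* Sketch: index of the first zero byte of X in memory order.
     Callers guarantee that X contains at least one zero byte.  */
  static unsigned int
  index_first_zero_ref (uint32_t x)
  {
    for (unsigned int i = 0; i < 3; i++)
      if (((x >> (24 - 8 * i)) & 0xff) == 0)
        return i;
    return 3;  /* Not in bytes 0..2, so it must be byte 3.  */
  }

The asm in the diff below reaches the same result via PA-RISC
conditional nullification: each extrw,u extracts one byte into %r0
purely for its condition, and the <> completer skips the following
ldi when that byte is nonzero.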
Diffstat (limited to 'sysdeps/hppa/string-fzc.h')
-rw-r--r--  sysdeps/hppa/string-fzc.h | 124
1 file changed, 124 insertions(+), 0 deletions(-)
diff --git a/sysdeps/hppa/string-fzc.h b/sysdeps/hppa/string-fzc.h
new file mode 100644
index 0000000000..c4e2e11550
--- /dev/null
+++ b/sysdeps/hppa/string-fzc.h
@@ -0,0 +1,124 @@
+/* string-fzc.h -- zero byte detection with indexes.  HPPA version.
+   Copyright (C) 2023 Free Software Foundation, Inc.
+   This file is part of the GNU C Library.
+
+   The GNU C Library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   The GNU C Library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with the GNU C Library; if not, see
+   <http://www.gnu.org/licenses/>.  */
+
+#ifndef _STRING_FZC_H
+#define _STRING_FZC_H 1
+
+#include <string-optype.h>
+
+_Static_assert (sizeof (op_t) == 4, "64-bit not supported");
+
+/* Given a word X that is known to contain a zero byte, return the
+   index of the first such byte within the word, in memory order.  */
+static __always_inline unsigned int
+index_first_zero (op_t x)
+{
+  unsigned int ret;
+
+  /* Since we have no clz insn, direct tests of the bytes are faster
+     than loading up the constants to do the masking.  */
+  asm ("extrw,u,<> %1,23,8,%%r0\n\t"
+       "ldi 2,%0\n\t"
+       "extrw,u,<> %1,15,8,%%r0\n\t"
+       "ldi 1,%0\n\t"
+       "extrw,u,<> %1,7,8,%%r0\n\t"
+       "ldi 0,%0"
+       : "=r"(ret) : "r"(x), "0"(3));
+
+  return ret;
+}
+
+/* Similarly, but perform the search for byte equality between X1 and X2.  */
+static __always_inline unsigned int
+index_first_eq (op_t x1, op_t x2)
+{
+  return index_first_zero (x1 ^ x2);
+}
+
+/* Similarly, but perform the search for zero within X1 or
+   equality between X1 and X2.  */
+static __always_inline unsigned int
+index_first_zero_eq (op_t x1, op_t x2)
+{
+  unsigned int ret;
+
+  /* Since we have no clz insn, direct tests of the bytes are faster
+     than loading up the constants to do the masking.  */
+  asm ("extrw,u,= %1,23,8,%%r0\n\t"
+       "extrw,u,<> %2,23,8,%%r0\n\t"
+       "ldi 2,%0\n\t"
+       "extrw,u,= %1,15,8,%%r0\n\t"
+       "extrw,u,<> %2,15,8,%%r0\n\t"
+       "ldi 1,%0\n\t"
+       "extrw,u,= %1,7,8,%%r0\n\t"
+       "extrw,u,<> %2,7,8,%%r0\n\t"
+       "ldi 0,%0"
+       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));
+
+  return ret;
+}
+
+/* Similarly, but perform the search for zero within X1 or
+   inequality between X1 and X2.  */
+static __always_inline unsigned int
+index_first_zero_ne (op_t x1, op_t x2)
+{
+  unsigned int ret;
+
+  /* Since we have no clz insn, direct tests of the bytes are faster
+     than loading up the constants to do the masking.  */
+  asm ("extrw,u,<> %2,23,8,%%r0\n\t"
+       "extrw,u,<> %1,23,8,%%r0\n\t"
+       "ldi 2,%0\n\t"
+       "extrw,u,<> %2,15,8,%%r0\n\t"
+       "extrw,u,<> %1,15,8,%%r0\n\t"
+       "ldi 1,%0\n\t"
+       "extrw,u,<> %2,7,8,%%r0\n\t"
+       "extrw,u,<> %1,7,8,%%r0\n\t"
+       "ldi 0,%0"
+       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));
+
+  return ret;
+}
+
+/* Similarly, but search for the last zero within X.  */
+static __always_inline unsigned int
+index_last_zero (op_t x)
+{
+  unsigned int ret;
+
+  /* Since we have no ctz insn, direct tests of the bytes are faster
+     than loading up the constants to do the masking.  */
+  asm ("extrw,u,<> %1,15,8,%%r0\n\t"
+       "ldi 1,%0\n\t"
+       "extrw,u,<> %1,23,8,%%r0\n\t"
+       "ldi 2,%0\n\t"
+       "extrw,u,<> %1,31,8,%%r0\n\t"
+       "ldi 3,%0"
+       : "=r"(ret) : "r"(x), "0"(0));
+
+  return ret;
+}
+
+/* Similarly, but search for the last equality between X1 and X2.  */
+static __always_inline unsigned int
+index_last_eq (op_t x1, op_t x2)
+{
+  return index_last_zero (x1 ^ x2);
+}
+
+#endif /* _STRING_FZC_H */
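
For context, these helpers are consumed by word-at-a-time string
routines: whole words are scanned until one is known to contain a zero
(or matching) byte, and the index_* functions above then locate that
byte within the word.  A minimal strlen-style sketch follows; it is
illustrative only.  has_zero is a local stand-in for the predicate the
companion string-fzb.h header supplies, strlen_sketch is our name, and
s is assumed word-aligned (aliasing concerns are glossed over).

  #include <stdint.h>

  /* Stand-in zero-byte presence test using the classic bit trick:
     a zero byte in X produces a set high bit in (X - 0x01010101)
     that survives the ~X mask.  */
  static int
  has_zero (uint32_t x)
  {
    return ((x - 0x01010101u) & ~x & 0x80808080u) != 0;
  }

  /* Sketch: word-wise strlen on a big-endian machine such as hppa,
     finishing with a sequential byte test as in index_first_zero.  */
  static unsigned long
  strlen_sketch (const char *s)
  {
    const uint32_t *w = (const uint32_t *) s;
    while (!has_zero (*w))
      w++;
    /* Locate the first zero byte within the word, in memory order.  */
    unsigned int i = 0;
    while (((*w >> (24 - 8 * i)) & 0xff) != 0)
      i++;
    return ((const char *) w - s) + i;
  }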