about summary refs log tree commit diff
path: root/sysdeps/hppa/string-fzc.h
blob: 292ed065192fa4fcc40c017aff7740a9f4087576 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
/* string-fzc.h -- zero byte detection with indexes.  HPPA version.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _STRING_FZC_H
#define _STRING_FZC_H 1

#include <string-optype.h>

_Static_assert (sizeof (op_t) == 4, "64-bit not supported");

/* Given a word X that is known to contain a zero byte, return the
   index of the first such within the long in memory order.

   HPPA is big-endian, so byte 0 in memory order is the most
   significant byte.  PA-RISC numbers bits from the MSB, and
   "extrw,u r,P,8,%r0" extracts into the zero register the 8-bit field
   ending at bit P: P=7 selects byte 0, P=15 byte 1, P=23 byte 2.  The
   ",<>" completer nullifies the following instruction when the
   extracted field is non-zero, so each "ldi N,%0" executes only when
   byte N of X is zero.  Testing from byte 2 down to byte 0 means the
   last assignment to execute belongs to the lowest-indexed zero byte,
   i.e. the first one in memory order.  Byte 3 needs no test: RET is
   preloaded with 3 (the "0"(3) constraint), which stands when no
   earlier byte is zero — the caller guarantees X has a zero byte.  */
static __always_inline unsigned int
index_first_zero (op_t x)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes is faster
     than loading up the constants to do the masking.  */
  asm ("extrw,u,<> %1,23,8,%%r0\n\t"    /* Byte 2 non-zero?  Skip next.  */
       "ldi 2,%0\n\t"                   /* Byte 2 zero: RET = 2.  */
       "extrw,u,<> %1,15,8,%%r0\n\t"    /* Byte 1 non-zero?  Skip next.  */
       "ldi 1,%0\n\t"                   /* Byte 1 zero: RET = 1.  */
       "extrw,u,<> %1,7,8,%%r0\n\t"     /* Byte 0 non-zero?  Skip next.  */
       "ldi 0,%0"                       /* Byte 0 zero: RET = 0.  */
       : "=r"(ret) : "r"(x), "0"(3));   /* RET preloaded with 3.  */

  return ret;
}

/* Similarly, but perform the search for byte equality between X1 and
   X2.  A byte of X1 ^ X2 is zero exactly where the corresponding
   bytes of X1 and X2 match, so this reduces to index_first_zero.  */
static __always_inline unsigned int
index_first_eq (op_t x1, op_t x2)
{
  op_t diff = x1 ^ x2;
  return index_first_zero (diff);
}

/* Similarly, but perform the search for zero within X1 or
   equality between X1 and X2.

   Operand %2 is X1 ^ X2, whose byte N is zero exactly when the bytes
   of X1 and X2 at index N are equal.  Each byte uses a pair of tests:
   the ",=" completer nullifies the next instruction when the
   extracted field IS zero, so a zero byte of X1 skips the second test
   and falls straight through to the "ldi"; otherwise the ",<>" test
   of X1 ^ X2 nullifies the "ldi" unless the two bytes are equal.
   Thus "ldi N,%0" executes iff byte N of X1 is zero or bytes N of X1
   and X2 match.  As in index_first_zero, bytes are tested from index
   2 down to index 0 so the first such index in memory order wins, and
   RET is preloaded with 3 to cover the untested last byte.  */
static __always_inline unsigned int
index_first_zero_eq (op_t x1, op_t x2)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes is faster
     than loading up the constants to do the masking.  */
  asm ("extrw,u,= %1,23,8,%%r0\n\t"     /* X1 byte 2 zero?  Skip next.  */
       "extrw,u,<> %2,23,8,%%r0\n\t"    /* Bytes 2 differ?  Skip ldi.  */
       "ldi 2,%0\n\t"                   /* Zero or equal: RET = 2.  */
       "extrw,u,= %1,15,8,%%r0\n\t"
       "extrw,u,<> %2,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,= %1,7,8,%%r0\n\t"
       "extrw,u,<> %2,7,8,%%r0\n\t"
       "ldi 0,%0"
       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));

  return ret;
}

/* Similarly, but perform the search for zero within X1 or
   inequality between X1 and X2.

   Operand %2 is X1 ^ X2; a non-zero byte there means X1 and X2 differ
   at that index.  For each byte, the first ",<>" test of X1 ^ X2
   nullifies the test of X1 when the bytes differ, letting the "ldi"
   execute directly; when the bytes are equal, the second ",<>" test
   nullifies the "ldi" unless the X1 byte is zero.  So "ldi N,%0"
   executes iff bytes N differ or byte N of X1 is zero.  Bytes are
   tested from index 2 down to index 0, so the first qualifying index
   in memory order wins; RET is preloaded with 3 for the untested last
   byte (the caller guarantees a hit exists).  */
static __always_inline unsigned int
index_first_zero_ne (op_t x1, op_t x2)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes is faster
     than loading up the constants to do the masking.  */
  asm ("extrw,u,<> %2,23,8,%%r0\n\t"    /* Bytes 2 differ?  Skip next.  */
       "extrw,u,<> %1,23,8,%%r0\n\t"    /* X1 byte 2 non-zero?  Skip ldi.  */
       "ldi 2,%0\n\t"                   /* Differ or zero: RET = 2.  */
       "extrw,u,<> %2,15,8,%%r0\n\t"
       "extrw,u,<> %1,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,<> %2,7,8,%%r0\n\t"
       "extrw,u,<> %1,7,8,%%r0\n\t"
       "ldi 0,%0"
       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));

  return ret;
}

/* Similarly, but search for the last zero within X.

   Mirror image of index_first_zero: bytes are tested in ascending
   memory order (1, 2, 3), and each "ldi N,%0" executes only when the
   ",<>" test finds byte N of X to be zero, so the final value of RET
   is the highest-indexed zero byte.  RET is preloaded with 0: if none
   of bytes 1-3 is zero, byte 0 must be the zero byte the caller
   guarantees.  */
static __always_inline unsigned int
index_last_zero (op_t x)
{
  unsigned int ret;

  /* Since we have no ctz insn, direct tests of the bytes is faster
     than loading up the constants to do the masking.  */
  asm ("extrw,u,<> %1,15,8,%%r0\n\t"    /* Byte 1 non-zero?  Skip next.  */
       "ldi 1,%0\n\t"                   /* Byte 1 zero: RET = 1.  */
       "extrw,u,<> %1,23,8,%%r0\n\t"    /* Byte 2 non-zero?  Skip next.  */
       "ldi 2,%0\n\t"                   /* Byte 2 zero: RET = 2.  */
       "extrw,u,<> %1,31,8,%%r0\n\t"    /* Byte 3 non-zero?  Skip next.  */
       "ldi 3,%0"                       /* Byte 3 zero: RET = 3.  */
       : "=r"(ret) : "r"(x), "0"(0));   /* RET preloaded with 0.  */

  return ret;
}

/* Similarly, but search for the last byte at which X1 and X2 are
   equal.  A byte of X1 ^ X2 is zero exactly where the inputs match,
   so this reduces to index_last_zero.  */
static __always_inline unsigned int
index_last_eq (op_t x1, op_t x2)
{
  op_t diff = x1 ^ x2;
  return index_last_zero (diff);
}

#endif /* _STRING_FZC_H */