/* string-fzc.h -- zero byte detection with indexes.  HPPA version.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _STRING_FZC_H
#define _STRING_FZC_H 1

#include <string-optype.h>

_Static_assert (sizeof (op_t) == 4, "64-bit not supported");

/* Given a word X that is known to contain a zero byte, return the
   index of the first such within the long in memory order.  */
static __always_inline unsigned int
index_first_zero (op_t x)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes are faster
     than loading up the constants to do the masking.  */
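  /* Each extrw,u,<> below extracts one byte (PA-RISC numbers bit 0 as
     the most significant bit, so position 23,8 names the third byte in
     memory order) and nullifies the following ldi when that byte is
     non-zero, so an index is stored only for a zero byte.  The tests run
     from byte 2 down to byte 0, letting the smallest matching index win,
     and the "0"(3) input seeds the result with 3 for the case where only
     the final byte is zero.  */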
  asm ("extrw,u,<> %1,23,8,%%r0\n\t"
       "ldi 2,%0\n\t"
       "extrw,u,<> %1,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,<> %1,7,8,%%r0\n\t"
       "ldi 0,%0"
       : "=r"(ret) : "r"(x), "0"(3));

  return ret;
}

/* Similarly, but perform the search for byte equality between X1 and X2.  */
static __always_inline unsigned int
index_first_eq (op_t x1, op_t x2)
{
  return index_first_zero (x1 ^ x2);
}

/* Similarly, but perform the search for zero within X1 or
   equality between X1 and X2.  */
static __always_inline unsigned int
index_first_zero_eq (op_t x1, op_t x2)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes are faster
     than loading up the constants to do the masking.  */
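  /* Each ldi below is guarded by two tests: the ,= test on X1 nullifies
     the second test when the byte of X1 is zero (so the ldi executes),
     and otherwise the ,<> test on X1 ^ X2 nullifies the ldi when the
     bytes of X1 and X2 differ.  The ldi therefore runs exactly when the
     byte is zero or the two bytes are equal.  */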
  asm ("extrw,u,= %1,23,8,%%r0\n\t"
       "extrw,u,<> %2,23,8,%%r0\n\t"
       "ldi 2,%0\n\t"
       "extrw,u,= %1,15,8,%%r0\n\t"
       "extrw,u,<> %2,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,= %1,7,8,%%r0\n\t"
       "extrw,u,<> %2,7,8,%%r0\n\t"
       "ldi 0,%0"
       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));

  return ret;
}

/* Similarly, but perform the search for zero within X1 or
   inequality between X1 and X2.  */
static __always_inline unsigned int
index_first_zero_ne (op_t x1, op_t x2)
{
  unsigned int ret;

  /* Since we have no clz insn, direct tests of the bytes are faster
     than loading up the constants to do the masking.  */
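  /* As above, but with the roles flipped: the ,<> test on X1 ^ X2
     nullifies the second test when the bytes differ (so the ldi
     executes), and otherwise the ,<> test on X1 nullifies the ldi when
     the byte of X1 is non-zero.  The ldi therefore runs exactly when the
     byte is zero or the two bytes differ.  */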
  asm ("extrw,u,<> %2,23,8,%%r0\n\t"
       "extrw,u,<> %1,23,8,%%r0\n\t"
       "ldi 2,%0\n\t"
       "extrw,u,<> %2,15,8,%%r0\n\t"
       "extrw,u,<> %1,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,<> %2,7,8,%%r0\n\t"
       "extrw,u,<> %1,7,8,%%r0\n\t"
       "ldi 0,%0"
       : "=r"(ret) : "r"(x1), "r"(x1 ^ x2), "0"(3));

  return ret;
}

/* Similarly, but search for the last zero within X.  */
static __always_inline unsigned int
index_last_zero (op_t x)
{
  unsigned int ret;

  /* Since we have no ctz insn, direct tests of the bytes are faster
     than loading up the constants to do the masking.  */
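  /* Here the result is seeded with 0 via the "0"(0) input and the tests
     run from byte 1 up to byte 3, so the highest index whose byte is
     zero is written last and wins.  */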
  asm ("extrw,u,<> %1,15,8,%%r0\n\t"
       "ldi 1,%0\n\t"
       "extrw,u,<> %1,23,8,%%r0\n\t"
       "ldi 2,%0\n\t"
       "extrw,u,<> %1,31,8,%%r0\n\t"
       "ldi 3,%0"
       : "=r"(ret) : "r"(x), "0"(0));

  return ret;
}

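/* Similarly, but search for the last byte equality between X1 and X2.  */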
static __always_inline unsigned int
index_last_eq (op_t x1, op_t x2)
{
  return index_last_zero (x1 ^ x2);
}

#endif /* _STRING_FZC_H */