/* Vector optimized 32/64 bit S/390 version of memrchr.
   Copyright (C) 2015-2016 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#if defined HAVE_S390_VX_ASM_SUPPORT && IS_IN (libc)

# include "sysdep.h"
# include "asm-syntax.h"

	.text

/* void *memrchr (const void *s, int c, size_t n)
   Scan memory backwards for character c and return a pointer to the
   last occurrence of c, i.e. the first c found when scanning from the
   end of the n-byte area.

   Register usage:
   -r0=tmp
   -r1=tmp
   -r2=s
   -r3=c
   -r4=n
   -r5=s in loop

   -v16=part of s
   -v17=index of found c
   -v18=c replicated
   -v20=permute pattern
*/
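/* Illustrative only (not part of the original source): a rough C sketch of
   the semantics this routine implements, assuming the usual memrchr
   contract; the function name below is hypothetical.

	void *
	memrchr_c_sketch (const void *s, int c, size_t n)
	{
	  const unsigned char *p = (const unsigned char *) s;
	  unsigned char ch = (unsigned char) c;
	  while (n-- > 0)
	    if (p[n] == ch)
	      return (void *) (p + n);
	  return NULL;
	}

   The vector code below computes the same result, but compares the buffer
   16 bytes at a time starting from its end, so the last occurrence of c is
   found first.  */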
ENTRY(__memrchr_vx)
	.machine "z13"
	.machinemode "zarch_nohighgprs"

# if !defined __s390x__
	llgfr	%r4,%r4
# endif /* !defined __s390x__ */
	clgije	%r4,0,.Lnot_found

	vlvgb	%v18,%r3,0	/* Generate a vector whose elements are all c.
				   If c > 255, c will be truncated.  */
	vrepb	%v18,%v18,0

	llcr	%r3,%r3		/* char c_char = (char) c.  */

	/* Check byte n - 1.  */
	llc	%r0,-1(%r4,%r2)
	slgfi	%r4,1
	clrje	%r0,%r3,.Lfound_end
	jh	.Lnot_found	/* Return NULL if n is now 0.  */

	larl	%r1,.Lpermute_mask /* Load permute mask.  */
	vl	%v20,0(%r1)

	/* Check byte n - 2.  */
	llc	%r0,-1(%r4,%r2)
	slgfi	%r4,1
	clrje	%r0,%r3,.Lfound_end
	jh	.Lnot_found	/* Return NULL if n is now 0.  */
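	/* Illustrative only: a hedged sketch of each of the two scalar checks
	   just performed, where p stands for s (%r2), n for %r4, ch for the
	   truncated character and b for the temporary in %r0.

		b = p[n - 1];
		n = n - 1;
		if (b == ch)
		  return (void *) (p + n);
		if (n == 0)
		  return NULL;

	   This is done once for byte n - 1 and once for byte n - 2 before
	   falling through to the vector paths.  */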

	clgijhe	%r4,64,.Lloop64	/* If n >= 64 -> loop64.  */

.Llt64:
	/* Process n < 64 bytes.  */
	clgijl	%r4,16,.Llt16	/* Jump away if n < 16.  */
	aghi	%r4,-16
	vl	%v16,0(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound0
	clgijl	%r4,16,.Llt16
	aghi	%r4,-16
	vl	%v16,0(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound0
	clgijl	%r4,16,.Llt16
	aghi	%r4,-16
	vl	%v16,0(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound0
.Llt16:
	clgfi	%r4,0		/* If remaining bytes == 0, return NULL.  */
	locghie	%r2,0
	ber	%r14

	aghi	%r4,-1		/* vll needs highest index.  */
	vll	%v16,%r4,0(%r2)	/* Load remaining bytes.  */

	/* Right-shift of v16 to mask bytes after highest index.  */
	lhi	%r0,15
	slr	%r0,%r4		/* Compute byte count for vector shift right.  */
	sll	%r0,3		/* Convert to bit count.  */
	vlvgb	%v17,%r0,7
	vsrlb	%v16,%v16,%v17	/* Vector shift right by byte by number of bytes
				   specified in bits 1-4 of byte 7 in v17.  */
	j	.Lfound_permute
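	/* Illustrative only: a hedged note on the shift above.  vll loads the
	   r4 + 1 remaining bytes into element positions 0 .. r4 of v16, so
	   shifting right by (15 - r4) bytes moves them to positions
	   (15 - r4) .. 15.  For example, if 5 bytes remain (r4 = 4), the
	   shift amount is (15 - 4) * 8 = 88 bits and the valid bytes end up
	   in positions 11 .. 15, i.e. the byte with the highest address sits
	   in position 15, the same layout the full-load paths produce.  The
	   byte reversal at .Lfound_permute can then treat partial and full
	   loads alike.  */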

.Lfound48:
	aghi	%r4,16
.Lfound32:
	aghi	%r4,16
.Lfound16:
	aghi	%r4,16
.Lfound0:
	la	%r2,0(%r4,%r2)	/* Set pointer to start of v16.  */
	lghi	%r4,15		/* Set highest index in v16 to last index.  */
.Lfound_permute:
	/* Search for a c in v16 in reversed byte order.  v16 contains %r4 + 1
	   bytes.  If v16 was not fully loaded, the bytes are already
	   right shifted, so that the bytes in v16 can simply be reversed.  */
	vperm	%v16,%v16,%v16,%v20 /* Permute v16 to reversed order.  */
	vfeeb	%v16,%v16,%v18	/* Find c in reversed v16.  */
	vlgvb	%r1,%v16,7	/* Index of c or 16 if not found.  */

	/* Return NULL if there is no c in loaded bytes.  */
	clrjh	%r1,%r4,.Lnot_found

	slgr	%r4,%r1
.Lfound_end:
	la	%r2,0(%r4,%r2)	/* Return pointer to c.  */
	br	%r14
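	/* Illustrative only: a hedged sketch of the index arithmetic above.
	   After the byte reversal, element 0 of v16 holds the byte with the
	   highest address, so a match at reversed index r1 corresponds to
	   original index r4 - r1 within the loaded bytes; for example, with
	   r4 = 15 and a match at reversed index 3, the result is the byte at
	   offset 12 from the start of v16.  A reversed index greater than r4
	   lies outside the valid bytes (or is 16 for "not found"), hence the
	   branch to .Lnot_found.  */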

.Lnot_found:
	lghi	%r2,0
	br	%r14

.Lpermute_mask:
	.byte	0x0F,0x0E,0x0D,0x0C,0x0B,0x0A,0x09,0x08
	.byte	0x07,0x06,0x05,0x04,0x03,0x02,0x01,0x00

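	/* Illustrative only: the loop below walks the buffer from the end in
	   64-byte chunks, four 16-byte vector comparisons per iteration.
	   Roughly, with a hypothetical helper vec16_find_last (base, ch)
	   standing in for a vl/vfeebs pair plus the reverse search, returning
	   the highest matching offset within a 16-byte block or -1:

		while (n >= 64)
		  {
		    n -= 64;
		    for (int off = 48; off >= 0; off -= 16)
		      {
		        int i = vec16_find_last (p + n + off, ch);
		        if (i >= 0)
		          return (void *) (p + n + off + i);
		      }
		  }

	   Whatever remains (n < 64) is handled by the .Llt64 / .Llt16 code
	   above.  */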
.Lloop64:
	aghi	%r4,-64
	vl	%v16,48(%r4,%r2) /* Load 16 bytes of memory area.  */
	vfeebs	%v17,%v16,%v18	/* Find c.  */
	jno	.Lfound48	/* Jump away if c was found.  */
	vl	%v16,32(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound32
	vl	%v16,16(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound16
	vl	%v16,0(%r4,%r2)
	vfeebs	%v17,%v16,%v18
	jno	.Lfound0

	clgijhe	%r4,64,.Lloop64	/* If n >= 64 -> loop64.  */
	j	.Llt64
END(__memrchr_vx)
#endif /* HAVE_S390_VX_ASM_SUPPORT && IS_IN (libc) */