;; Machine description for RISC-V Bit Manipulation operations.
;; Copyright (C) 2021-2023 Free Software Foundation, Inc.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.

;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.

;; ZBA extension.

(define_insn "*zero_extendsidi2_bitmanip"
  [(set (match_operand:DI 0 "register_operand" "=r,r")
        (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_64BIT && TARGET_ZBA"
  "@
   zext.w\t%0,%1
   lwu\t%0,%1"
  [(set_attr "type" "bitmanip,load")
   (set_attr "mode" "DI")])

(define_insn "*shNadd"
  [(set (match_operand:X 0 "register_operand" "=r")
        (plus:X (ashift:X (match_operand:X 1 "register_operand" "r")
                          (match_operand:QI 2 "imm123_operand" "Ds3"))
                (match_operand:X 3 "register_operand" "r")))]
  "TARGET_ZBA"
  "sh%2add\t%0,%1,%3"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "<X:MODE>")])

; When using strength-reduction, we will reduce a multiplication to a
; sequence of shifts and adds.  If this is performed with 32-bit types
; and followed by a division, the lack of w-form sh[123]add will make
; combination impossible and lead to a slli + addw being generated.
; Split the sequence with the knowledge that a w-form div will perform
; implicit sign-extensions.
(define_split
  [(set (match_operand:DI 0 "register_operand")
        (sign_extend:DI (div:SI (plus:SI (subreg:SI (ashift:DI (match_operand:DI 1 "register_operand")
                                                               (match_operand:QI 2 "imm123_operand")) 0)
                                         (subreg:SI (match_operand:DI 3 "register_operand") 0))
                                (subreg:SI (match_operand:DI 4 "register_operand") 0))))
   (clobber (match_operand:DI 5 "register_operand"))]
  "TARGET_64BIT && TARGET_ZBA"
  [(set (match_dup 5) (plus:DI (ashift:DI (match_dup 1) (match_dup 2)) (match_dup 3)))
   (set (match_dup 0) (sign_extend:DI (div:SI (subreg:SI (match_dup 5) 0) (subreg:SI (match_dup 4) 0))))])
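
;; An illustrative example (not part of the original sources): for
;;   int f (int a, int b) { return (a * 9) / b; }
;; strength reduction turns a * 9 into (a << 3) + a, and this split
;; allows roughly
;;   sh3add  a5,a0,a0
;;   divw    a0,a5,a1
;; instead of a slli + addw + divw sequence, relying on the implicit
;; sign-extensions performed by the w-form divide.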

; Zba does not provide W-forms of sh[123]add(.uw)?, which leads to an
; interesting irregularity: we can generate a signed 32-bit result
; using slli(.uw)? + addw, but an unsigned 32-bit result can be
; generated more efficiently as sh[123]add + zext.w (the .uw can be
; dropped if we zero-extend the output anyway).
;
; To enable this optimization, we split [ slli(.uw)?, addw, zext.w ]
; into [ sh[123]add, zext.w ] for use during combine.
(define_split
  [(set (match_operand:DI 0 "register_operand")
        (zero_extend:DI (plus:SI (ashift:SI (subreg:SI (match_operand:DI 1 "register_operand") 0)
                                            (match_operand:QI 2 "imm123_operand"))
                                 (subreg:SI (match_operand:DI 3 "register_operand") 0))))]
  "TARGET_64BIT && TARGET_ZBA"
  [(set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (match_dup 2)) (match_dup 3)))
   (set (match_dup 0) (zero_extend:DI (subreg:SI (match_dup 0) 0)))])
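
;; A hypothetical example of the transformation above: zero-extending the
;; 32-bit result of (a << 3) + b into a 64-bit value can be emitted as
;;   sh3add  a0,a0,a1
;;   zext.w  a0,a0
;; rather than as slli (or slli.uw) + addw followed by a zero-extension.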

(define_split
  [(set (match_operand:DI 0 "register_operand")
        (zero_extend:DI (plus:SI (subreg:SI (and:DI (ashift:DI (match_operand:DI 1 "register_operand")
                                                                (match_operand:QI 2 "imm123_operand"))
                                                    (match_operand:DI 3 "consecutive_bits_operand")) 0)
                                 (subreg:SI (match_operand:DI 4 "register_operand") 0))))]
  "TARGET_64BIT && TARGET_ZBA
   && riscv_shamt_matches_mask_p (INTVAL (operands[2]), INTVAL (operands[3]))"
  [(set (match_dup 0) (plus:DI (ashift:DI (match_dup 1) (match_dup 2)) (match_dup 4)))
   (set (match_dup 0) (zero_extend:DI (subreg:SI (match_dup 0) 0)))])

; Make sure that an andi followed by a sh[123]add remains a two-instruction
; sequence and is not torn apart into slli, srli, add.
(define_insn_and_split "*andi_add.uw"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (plus:DI (and:DI (ashift:DI (match_operand:DI 1 "register_operand" "r")
                                    (match_operand:QI 2 "imm123_operand" "Ds3"))
                         (match_operand:DI 3 "consecutive_bits_operand" ""))
                 (match_operand:DI 4 "register_operand" "r")))
   (clobber (match_scratch:DI 5 "=&r"))]
  "TARGET_64BIT && TARGET_ZBA
   && riscv_shamt_matches_mask_p (INTVAL (operands[2]), INTVAL (operands[3]))
   && SMALL_OPERAND (INTVAL (operands[3]) >> INTVAL (operands[2]))"
  "#"
  "&& reload_completed"
  [(set (match_dup 5) (and:DI (match_dup 1) (match_dup 3)))
   (set (match_dup 0) (plus:DI (ashift:DI (match_dup 5) (match_dup 2))
                               (match_dup 4)))]
{
  operands[3] = GEN_INT (INTVAL (operands[3]) >> INTVAL (operands[2]));
})
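
;; For instance (an illustrative case, not from the original sources),
;; ((a & 0x3f) << 2) + b reaches this pattern with operands[2] == 2 and
;; operands[3] == 0xfc, and stays the two-instruction sequence
;;   andi    a5,a0,63
;;   sh2add  a0,a5,a1
;; instead of being torn apart into slli + srli + add.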

(define_insn "*shNadduw"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (plus:DI
          (and:DI (ashift:DI (match_operand:DI 1 "register_operand" "r")
                             (match_operand:QI 2 "imm123_operand" "Ds3"))
                  (match_operand 3 "immediate_operand" "n"))
          (match_operand:DI 4 "register_operand" "r")))]
  "TARGET_64BIT && TARGET_ZBA
   && (INTVAL (operands[3]) >> INTVAL (operands[2])) == 0xffffffff"
  "sh%2add.uw\t%0,%1,%4"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "DI")])

;; During combine, we may encounter an attempt to combine
;;   slli rtmp, rs, #imm
;;   zext.w rtmp, rtmp
;;   sh[123]add rd, rtmp, rs2
;; which will lead to the immediate not satisfying the above constraints.
;; By splitting the compound expression, we can simplify to a slli and a
;; sh[123]add.uw.
(define_split
  [(set (match_operand:DI 0 "register_operand")
        (plus:DI (and:DI (ashift:DI (match_operand:DI 1 "register_operand")
                                    (match_operand:QI 2 "immediate_operand"))
                         (match_operand:DI 3 "consecutive_bits_operand"))
                 (match_operand:DI 4 "register_operand")))
   (clobber (match_operand:DI 5 "register_operand"))]
  "TARGET_64BIT && TARGET_ZBA"
  [(set (match_dup 5) (ashift:DI (match_dup 1) (match_dup 6)))
   (set (match_dup 0) (plus:DI (and:DI (ashift:DI (match_dup 5)
                                                  (match_dup 7))
                                       (match_dup 8))
                               (match_dup 4)))]
{
  unsigned HOST_WIDE_INT mask = UINTVAL (operands[3]);
  /* scale: shift within the sh[123]add.uw */
  unsigned HOST_WIDE_INT scale = 32 - clz_hwi (mask);
  /* bias: pre-scale amount (i.e. the prior shift amount) */
  int bias = ctz_hwi (mask) - scale;

  /* If the bias + scale don't add up to operand[2], reject.  */
  if ((scale + bias) != UINTVAL (operands[2]))
    FAIL;

  /* If the shift-amount is out-of-range for sh[123]add.uw, reject.  */
  if ((scale < 1) || (scale > 3))
    FAIL;

  /* If there's no bias, the '*shNadduw' pattern should have matched.  */
  if (bias == 0)
    FAIL;

  operands[6] = GEN_INT (bias);
  operands[7] = GEN_INT (scale);
  operands[8] = GEN_INT (0xffffffffULL << scale);
})
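
;; A worked example (illustrative, not from the original sources):
;; combining
;;   slli    a5,a0,2
;;   zext.w  a5,a5
;;   sh1add  a0,a5,a1
;; hands this split operands[2] == 3 and operands[3] == 0x1fffffff8,
;; giving scale == 1 and bias == 2, so the split emits a slli by 2
;; followed by a sh1add.uw.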

(define_insn "*add.uw"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (plus:DI (zero_extend:DI
                   (match_operand:SI 1 "register_operand" "r"))
                 (match_operand:DI 2 "register_operand" "r")))]
  "TARGET_64BIT && TARGET_ZBA"
  "add.uw\t%0,%1,%2"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "DI")])

(define_insn "*slliuw"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (and:DI (ashift:DI (match_operand:DI 1 "register_operand" "r")
                           (match_operand:QI 2 "immediate_operand" "I"))
                (match_operand 3 "immediate_operand" "n")))]
  "TARGET_64BIT && TARGET_ZBA
   && (INTVAL (operands[3]) >> INTVAL (operands[2])) == 0xffffffff"
  "slli.uw\t%0,%1,%2"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "DI")])

;; ZBB extension.

(define_insn "*<optab>_not<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (bitmanip_bitwise:X (not:X (match_operand:X 1 "register_operand" "r"))
                            (match_operand:X 2 "register_operand" "r")))]
  "TARGET_ZBB"
  "<insn>n\t%0,%2,%1"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "<X:MODE>")])

;; '(a >= 0) ? b : 0' is emitted branchless (from if-conversion).  Without a
;; bit of extra help for combine (i.e., the below split), we end up emitting
;; not/srai/and instead of combining the not into an andn.
(define_split
  [(set (match_operand:DI 0 "register_operand")
        (and:DI (neg:DI (ge:DI (match_operand:DI 1 "register_operand")
                               (const_int 0)))
                (match_operand:DI 2 "register_operand")))
   (clobber (match_operand:DI 3 "register_operand"))]
  "TARGET_ZBB"
  [(set (match_dup 3) (ashiftrt:DI (match_dup 1) (const_int 63)))
   (set (match_dup 0) (and:DI (not:DI (match_dup 3)) (match_dup 2)))])
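
;; For example (illustrative only), for
;;   long f (long a, long b) { return a >= 0 ? b : 0; }
;; this split allows
;;   srai    a5,a0,63
;;   andn    a0,a1,a5
;; instead of a srai/not/and sequence.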

(define_insn "*xor_not<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (not:X (xor:X (match_operand:X 1 "register_operand" "r")
                      (match_operand:X 2 "register_operand" "r"))))]
  "TARGET_ZBB"
  "xnor\t%0,%1,%2"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "<X:MODE>")])

(define_insn "<bitmanip_optab>si2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (clz_ctz_pcnt:SI (match_operand:SI 1 "register_operand" "r")))]
  "TARGET_ZBB"
  "<bitmanip_insn>%~\t%0,%1"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "SI")])

(define_insn "*<bitmanip_optab>disi2"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (sign_extend:DI
          (clz_ctz_pcnt:SI (match_operand:SI 1 "register_operand" "r"))))]
  "TARGET_64BIT && TARGET_ZBB"
  "<bitmanip_insn>w\t%0,%1"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "SI")])

(define_insn "<bitmanip_optab>di2"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (clz_ctz_pcnt:DI (match_operand:DI 1 "register_operand" "r")))]
  "TARGET_64BIT && TARGET_ZBB"
  "<bitmanip_insn>\t%0,%1"
  [(set_attr "type" "bitmanip")
   (set_attr "mode" "DI")])

(define_insn "*zero_extendhi<GPR:mode>2_bitmanip"
  [(set (match_operand:GPR 0 "register_operand" "=r,r")
        (zero_extend:GPR (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
  "TARGET_ZBB"
  "@
   zext.h\t%0,%1
   lhu\t%0,%1"
  [(set_attr "type" "bitmanip,load")
   (set_attr "mode" "<GPR:MODE>")])

(define_insn "*extend<SHORT:mode><SUPERQI:mode>2_zbb"
  [(set (match_operand:SUPERQI 0 "register_operand" "=r,r")
        (sign_extend:SUPERQI
          (match_operand:SHORT 1 "nonimmediate_operand" " r,m")))]
  "TARGET_ZBB"
  "@
   sext.<SHORT:size>\t%0,%1
   l<SHORT:size>\t%0,%1"
  [(set_attr "type" "bitmanip,load")
   (set_attr "mode" "<SUPERQI:MODE>")])

(define_insn "*zero_extendhi<GPR:mode>2_zbb"
  [(set (match_operand:GPR 0 "register_operand" "=r,r")
        (zero_extend:GPR
          (match_operand:HI 1 "nonimmediate_operand" " r,m")))]
  "TARGET_ZBB"
  "@
   zext.h\t%0,%1
   lhu\t%0,%1"
  [(set_attr "type" "bitmanip,load")
   (set_attr "mode" "HI")])

(define_insn "rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (rotatert:SI (match_operand:SI 1 "register_operand" "r")
                     (match_operand:QI 2 "arith_operand" "rI")))]
  "TARGET_ZBB"
  "ror%i2%~\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "rotrdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (rotatert:DI (match_operand:DI 1 "register_operand" "r")
                     (match_operand:QI 2 "arith_operand" "rI")))]
  "TARGET_64BIT && TARGET_ZBB"
  "ror%i2\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "rotrsi3_sext"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (sign_extend:DI (rotatert:SI (match_operand:SI 1 "register_operand" "r")
                                     (match_operand:QI 2 "register_operand" "r"))))]
  "TARGET_64BIT && TARGET_ZBB"
  "rorw\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "rotlsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (rotate:SI (match_operand:SI 1 "register_operand" "r")
                   (match_operand:QI 2 "register_operand" "r")))]
  "TARGET_ZBB"
  "rol%~\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "rotldi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (rotate:DI (match_operand:DI 1 "register_operand" "r")
                   (match_operand:QI 2 "register_operand" "r")))]
  "TARGET_64BIT && TARGET_ZBB"
  "rol\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "rotlsi3_sext"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (sign_extend:DI (rotate:SI (match_operand:SI 1 "register_operand" "r")
                                   (match_operand:QI 2 "register_operand" "r"))))]
  "TARGET_64BIT && TARGET_ZBB"
  "rolw\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

;; orc.b (or-combine) is added as an unspec to support the optimized
;; string functions (such as strcmp).
(define_insn "orcb<mode>2"
  [(set (match_operand:X 0 "register_operand" "=r")
        (unspec:X [(match_operand:X 1 "register_operand" "r")] UNSPEC_ORC_B))]
  "TARGET_ZBB"
  "orc.b\t%0,%1")

(define_insn "bswap<mode>2"
  [(set (match_operand:X 0 "register_operand" "=r")
        (bswap:X (match_operand:X 1 "register_operand" "r")))]
  "TARGET_ZBB"
  "rev8\t%0,%1"
  [(set_attr "type" "bitmanip")])

;; HI bswap can be emulated using SI/DI bswap followed
;; by a logical shift right.
;; SI bswap for TARGET_64BIT is already handled similarly
;; in the common code.
(define_expand "bswaphi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
        (bswap:HI (match_operand:HI 1 "register_operand" "r")))]
  "TARGET_ZBB"
{
  rtx tmp = gen_reg_rtx (word_mode);
  rtx newop1 = gen_lowpart (word_mode, operands[1]);
  if (TARGET_64BIT)
    emit_insn (gen_bswapdi2 (tmp, newop1));
  else
    emit_insn (gen_bswapsi2 (tmp, newop1));
  rtx tmp1 = gen_reg_rtx (word_mode);
  if (TARGET_64BIT)
    emit_insn (gen_lshrdi3 (tmp1, tmp, GEN_INT (64 - 16)));
  else
    emit_insn (gen_lshrsi3 (tmp1, tmp, GEN_INT (32 - 16)));
  emit_move_insn (operands[0], gen_lowpart (HImode, tmp1));
  DONE;
})
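
;; As a sketch of the expansion above (illustrative only), on RV64 a
;; 16-bit byte swap of a0 becomes
;;   rev8    a0,a0
;;   srli    a0,a0,48
;; i.e. a full-register byte swap followed by a logical shift right by
;; XLEN - 16.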

(define_insn "<bitmanip_optab><mode>3"
  [(set (match_operand:X 0 "register_operand" "=r")
        (bitmanip_minmax:X (match_operand:X 1 "register_operand" "r")
                           (match_operand:X 2 "register_operand" "r")))]
  "TARGET_ZBB"
  "<bitmanip_insn>\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

;; Optimize the common case of a SImode min/max against a constant
;; that is safe both for sign- and zero-extension.
(define_insn_and_split "*minmax"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (sign_extend:DI
          (subreg:SI
            (bitmanip_minmax:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
                                (match_operand:DI 2 "immediate_operand" "i"))
            0)))
   (clobber (match_scratch:DI 3 "=&r"))
   (clobber (match_scratch:DI 4 "=&r"))]
  "TARGET_64BIT && TARGET_ZBB && sext_hwi (INTVAL (operands[2]), 32) >= 0"
  "#"
  "&& reload_completed"
  [(set (match_dup 3) (sign_extend:DI (match_dup 1)))
   (set (match_dup 4) (match_dup 2))
   (set (match_dup 0) (<minmax_optab>:DI (match_dup 3) (match_dup 4)))])
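
;; An illustrative lowering (not from the original sources): with 1000 as
;; operands[2], the split above emits roughly
;;   sext.w  a5,a0
;;   li      a4,1000
;;   min     a0,a5,a4
;; with min standing in for whichever of min/minu/max/maxu was matched.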

;; ZBS extension.

(define_insn "*bset<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (ior:X (ashift:X (const_int 1)
                         (match_operand:QI 2 "register_operand" "r"))
               (match_operand:X 1 "register_operand" "r")))]
  "TARGET_ZBS"
  "bset\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "*bset<mode>_mask"
  [(set (match_operand:X 0 "register_operand" "=r")
        (ior:X (ashift:X (const_int 1)
                         (subreg:QI
                           (and:X (match_operand:X 2 "register_operand" "r")
                                  (match_operand 3 "<X:shiftm1>" "<X:shiftm1p>")) 0))
               (match_operand:X 1 "register_operand" "r")))]
  "TARGET_ZBS"
  "bset\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "*bset<mode>_1"
  [(set (match_operand:X 0 "register_operand" "=r")
        (ashift:X (const_int 1)
                  (match_operand:QI 1 "register_operand" "r")))]
  "TARGET_ZBS"
  "bset\t%0,x0,%1"
  [(set_attr "type" "bitmanip")])

(define_insn "*bset<mode>_1_mask"
  [(set (match_operand:X 0 "register_operand" "=r")
        (ashift:X (const_int 1)
                  (subreg:QI
                    (and:X (match_operand:X 1 "register_operand" "r")
                           (match_operand 2 "<X:shiftm1>" "<X:shiftm1p>")) 0)))]
  "TARGET_ZBS"
  "bset\t%0,x0,%1"
  [(set_attr "type" "bitmanip")])

(define_insn "*bseti<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (ior:X (match_operand:X 1 "register_operand" "r")
               (match_operand:X 2 "single_bit_mask_operand" "DbS")))]
  "TARGET_ZBS"
  "bseti\t%0,%1,%S2"
  [(set_attr "type" "bitmanip")])

;; As long as the SImode operand is not a partial subreg, we can use a
;; bseti without postprocessing, as the middle end is smart enough to
;; stay away from the signbit.
(define_insn "*bsetidisi"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (ior:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
                (match_operand 2 "single_bit_mask_operand" "i")))]
  "TARGET_ZBS && TARGET_64BIT
   && !partial_subreg_p (operands[1])"
  "bseti\t%0,%1,%S2"
  [(set_attr "type" "bitmanip")])
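
;; For instance (an illustrative case, not from the original sources),
;; OR-ing a sign-extended int with a single bit such as 0x800 can be
;; emitted directly as
;;   bseti   a0,a0,11
;; since bit 11 is well below the sign bit of the SImode operand.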

(define_insn "*bclr<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (and:X (rotate:X (const_int -2)
                         (match_operand:QI 2 "register_operand" "r"))
               (match_operand:X 1 "register_operand" "r")))]
  "TARGET_ZBS"
  "bclr\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "*bclri<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (and:X (match_operand:X 1 "register_operand" "r")
               (match_operand:X 2 "not_single_bit_mask_operand" "DnS")))]
  "TARGET_ZBS"
  "bclri\t%0,%1,%T2"
  [(set_attr "type" "bitmanip")])

;; In case we have "val & ~IMM" where ~IMM has 2 bits set.
(define_insn_and_split "*bclri<mode>_nottwobits"
  [(set (match_operand:X 0 "register_operand" "=r")
        (and:X (match_operand:X 1 "register_operand" "r")
               (match_operand:X 2 "const_nottwobits_operand" "i")))]
  "TARGET_ZBS && !paradoxical_subreg_p (operands[1])"
  "#"
  "&& reload_completed"
  [(set (match_dup 0) (and:X (match_dup 1) (match_dup 3)))
   (set (match_dup 0) (and:X (match_dup 0) (match_dup 4)))]
{
  unsigned HOST_WIDE_INT bits = ~UINTVAL (operands[2]);
  unsigned HOST_WIDE_INT topbit = HOST_WIDE_INT_1U << floor_log2 (bits);

  operands[3] = GEN_INT (~bits | topbit);
  operands[4] = GEN_INT (~topbit);
})
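
;; For example (illustrative only), clearing two bits at once on RV64,
;;   a & ~((1ul << 44) | (1ul << 20)),
;; splits into two single-bit clears:
;;   bclri   a0,a0,20
;;   bclri   a0,a0,44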

;; In case of a paradoxical subreg, the sign bit and the high bits are
;; not allowed to be changed.
(define_insn_and_split "*bclridisi_nottwobits"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (and:DI (match_operand:DI 1 "register_operand" "r")
                (match_operand:DI 2 "const_nottwobits_operand" "i")))]
  "TARGET_64BIT && TARGET_ZBS
   && clz_hwi (~UINTVAL (operands[2])) > 33"
  "#"
  "&& reload_completed"
  [(set (match_dup 0) (and:DI (match_dup 1) (match_dup 3)))
   (set (match_dup 0) (and:DI (match_dup 0) (match_dup 4)))]
{
  unsigned HOST_WIDE_INT bits = ~UINTVAL (operands[2]);
  unsigned HOST_WIDE_INT topbit = HOST_WIDE_INT_1U << floor_log2 (bits);

  operands[3] = GEN_INT (~bits | topbit);
  operands[4] = GEN_INT (~topbit);
})

(define_insn "*binv<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (xor:X (ashift:X (const_int 1)
                         (match_operand:QI 2 "register_operand" "r"))
               (match_operand:X 1 "register_operand" "r")))]
  "TARGET_ZBS"
  "binv\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "*binvi<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (xor:X (match_operand:X 1 "register_operand" "r")
               (match_operand:X 2 "single_bit_mask_operand" "DbS")))]
  "TARGET_ZBS"
  "binvi\t%0,%1,%S2"
  [(set_attr "type" "bitmanip")])

(define_insn "*bext<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (zero_extract:X (match_operand:X 1 "register_operand" "r")
                        (const_int 1)
                        (zero_extend:X
                          (match_operand:QI 2 "register_operand" "r"))))]
  "TARGET_ZBS"
  "bext\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

;; When performing `(a & (1UL << bitno)) ? 0 : -1` the combiner
;; usually has the `bitno` typed as X-mode (i.e. no further
;; zero-extension is performed around the bitno).
(define_insn "*bext<mode>"
  [(set (match_operand:X 0 "register_operand" "=r")
        (zero_extract:X (match_operand:X 1 "register_operand" "r")
                        (const_int 1)
                        (match_operand:X 2 "register_operand" "r")))]
  "TARGET_ZBS"
  "bext\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

(define_insn "*bexti"
  [(set (match_operand:X 0 "register_operand" "=r")
        (zero_extract:X (match_operand:X 1 "register_operand" "r")
                        (const_int 1)
                        (match_operand 2 "immediate_operand" "n")))]
  "TARGET_ZBS && UINTVAL (operands[2]) < GET_MODE_BITSIZE (<MODE>mode)"
  "bexti\t%0,%1,%2"
  [(set_attr "type" "bitmanip")])

;; Split for "(a & (1 << BIT_NO)) ? 0 : 1":
;; We avoid reassociating "(~(a >> BIT_NO)) & 1" into "((~a) >> BIT_NO) & 1",
;; so we don't have to use a temporary.  Instead we extract the bit and then
;; invert bit 0 ("a ^ 1") only.
(define_split
  [(set (match_operand:X 0 "register_operand")
        (and:X (not:X (lshiftrt:X (match_operand:X 1 "register_operand")
                                  (subreg:QI (match_operand:X 2 "register_operand") 0)))
               (const_int 1)))]
  "TARGET_ZBS"
  [(set (match_dup 0) (zero_extract:X (match_dup 1)
                                      (const_int 1)
                                      (match_dup 2)))
   (set (match_dup 0) (xor:X (match_dup 0) (const_int 1)))])
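
;; For example (illustrative only),
;;   int f (long a, long n) { return (a & (1l << n)) ? 0 : 1; }
;; becomes
;;   bext    a0,a0,a1
;;   xori    a0,a0,1
;; with no temporary needed for a negated copy of a.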

;; We can create a polarity-reversed mask (i.e. bit N -> { set = 0, clear = -1 })
;; using a bext(i) followed by an addi instruction.
;; This splits the canonical representation of "(a & (1 << BIT_NO)) ? 0 : -1".
(define_split
  [(set (match_operand:GPR 0 "register_operand")
        (neg:GPR (eq:GPR (zero_extract:GPR (match_operand:GPR 1 "register_operand")
                                           (const_int 1)
                                           (match_operand 2))
                         (const_int 0))))]
  "TARGET_ZBS"
  [(set (match_dup 0) (zero_extract:GPR (match_dup 1) (const_int 1) (match_dup 2)))
   (set (match_dup 0) (plus:GPR (match_dup 0) (const_int -1)))])
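
;; For example (illustrative only),
;;   long f (long a, long n) { return (a & (1l << n)) ? 0 : -1; }
;; can be emitted as
;;   bext    a0,a0,a1
;;   addi    a0,a0,-1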

;; Catch those cases where we can use a bseti/binvi + ori/xori or
;; bseti/binvi + bseti/binvi instead of a lui + addi + or/xor sequence.
(define_insn_and_split "*<or_optab>i<mode>_extrabit"
  [(set (match_operand:X 0 "register_operand" "=r")
        (any_or:X (match_operand:X 1 "register_operand" "r")
                  (match_operand:X 2 "uimm_extra_bit_or_twobits" "i")))]
  "TARGET_ZBS"
  "#"
  "&& reload_completed"
  [(set (match_dup 0) (<or_optab>:X (match_dup 1) (match_dup 3)))
   (set (match_dup 0) (<or_optab>:X (match_dup 0) (match_dup 4)))]
{
  unsigned HOST_WIDE_INT bits = UINTVAL (operands[2]);
  unsigned HOST_WIDE_INT topbit = HOST_WIDE_INT_1U << floor_log2 (bits);

  operands[3] = GEN_INT (bits &~ topbit);
  operands[4] = GEN_INT (topbit);
})
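
;; For example (illustrative only), a | 0x80000001 on RV64 splits into
;;   ori     a0,a0,1
;;   bseti   a0,a0,31
;; avoiding a multi-instruction sequence to materialize the constant.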

;; Likewise, catch those cases where we can use a bclri + andi or a
;; bclri + bclri instead of a lui + addi + and sequence.
(define_insn_and_split "*andi<mode>_extrabit"
  [(set (match_operand:X 0 "register_operand" "=r")
        (and:X (match_operand:X 1 "register_operand" "r")
               (match_operand:X 2 "not_uimm_extra_bit_or_nottwobits" "i")))]
  "TARGET_ZBS"
  "#"
  "&& reload_completed"
  [(set (match_dup 0) (and:X (match_dup 1) (match_dup 3)))
   (set (match_dup 0) (and:X (match_dup 0) (match_dup 4)))]
{
  unsigned HOST_WIDE_INT bits = UINTVAL (operands[2]);
  unsigned HOST_WIDE_INT topbit = HOST_WIDE_INT_1U << floor_log2 (~bits);

  operands[3] = GEN_INT (bits | topbit);
  operands[4] = GEN_INT (~topbit);
})
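
;; For example (illustrative only), clearing bit 40 and bit 5 on RV64,
;;   a & ~((1ul << 40) | 0x20),
;; splits into
;;   andi    a0,a0,-33
;;   bclri   a0,a0,40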

;; IF_THEN_ELSE: test for 2 bits of opposite polarity
(define_insn_and_split "*branch<X:mode>_mask_twobits_equals_singlebit"
  [(set (pc)
        (if_then_else
          (match_operator 1 "equality_operator"
            [(and:X (match_operand:X 2 "register_operand" "r")
                    (match_operand:X 3 "const_twobits_not_arith_operand" "i"))
             (match_operand:X 4 "single_bit_mask_operand" "i")])
          (label_ref (match_operand 0 "" ""))
          (pc)))
   (clobber (match_scratch:X 5 "=&r"))
   (clobber (match_scratch:X 6 "=&r"))]
  "TARGET_ZBS && TARGET_ZBB"
  "#"
  "&& reload_completed"
  [(set (match_dup 5) (zero_extract:X (match_dup 2)
                                      (const_int 1)
                                      (match_dup 8)))
   (set (match_dup 6) (zero_extract:X (match_dup 2)
                                      (const_int 1)
                                      (match_dup 9)))
   (set (match_dup 6) (and:X (not:X (match_dup 6)) (match_dup 5)))
   (set (pc) (if_then_else (match_op_dup 1 [(match_dup 6) (const_int 0)])
                           (label_ref (match_dup 0))
                           (pc)))]
{
  unsigned HOST_WIDE_INT twobits_mask = UINTVAL (operands[3]);
  unsigned HOST_WIDE_INT singlebit_mask = UINTVAL (operands[4]);

  /* We should never see an unsatisfiable condition.  */
  gcc_assert (twobits_mask & singlebit_mask);

  int setbit = ctz_hwi (singlebit_mask);
  int clearbit = ctz_hwi (twobits_mask & ~singlebit_mask);

  operands[1] = gen_rtx_fmt_ee (GET_CODE (operands[1]) == NE ? EQ : NE,
                                <X:MODE>mode, operands[6], GEN_INT (0));

  operands[8] = GEN_INT (setbit);
  operands[9] = GEN_INT (clearbit);
})
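
;; For example (illustrative only), on RV64 a test such as
;;   if ((a & ((1ul << 40) | (1ul << 20))) == (1ul << 20))
;; (bit 20 must be set, bit 40 must be clear) can be emitted as
;;   bexti   a5,a0,20
;;   bexti   a4,a0,40
;;   andn    a4,a5,a4
;;   bnez    a4,.L3
;; instead of materializing the two-bit constant and comparing.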