/* Subroutines used for code generation for RISC-V.
   Copyright (C) 2023-2024 Free Software Foundation, Inc.
   Contributed by Christoph Müllner (christoph.muellner@vrull.eu).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "backend.h"
#include "tree.h"
#include "rtl.h"
#include "explow.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "optabs.h"
#include "poly-int.h"
#include "output.h"
#include "regs.h"
#include "riscv-protos.h"

/* If X is a PLUS of a CONST_INT, return the two terms in *BASE_PTR
   and *OFFSET_PTR.  Return X in *BASE_PTR and 0 in *OFFSET_PTR otherwise.  */

static void
split_plus (rtx x, rtx *base_ptr, HOST_WIDE_INT *offset_ptr)
{
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    {
      *base_ptr = XEXP (x, 0);
      *offset_ptr = INTVAL (XEXP (x, 1));
    }
  else
    {
      *base_ptr = x;
      *offset_ptr = 0;
    }
}

/* Output a mempair instruction with the provided OPERANDS.
   LOAD_P is true if we have a pair of loads (stores otherwise).
   MODE is the access mode (DI or SI).
   CODE is the extension code (UNKNOWN, SIGN_EXTEND or ZERO_EXTEND).
   This instruction does not handle invalid inputs gracefully,
   but is full of assertions to ensure that only valid instructions
   are emitted.  */

const char *
th_mempair_output_move (rtx operands[4], bool load_p,
                        machine_mode mode, RTX_CODE code)
{
  rtx reg1, reg2, mem1, mem2, base1, base2;
  HOST_WIDE_INT offset1, offset2;
  rtx output_operands[5];
  const char* format;

  gcc_assert (mode == SImode || mode == DImode);

  /* Paired 64-bit access instructions have a fixed shift amount of 4.
     Paired 32-bit access instructions have a fixed shift amount of 3.  */
  unsigned shamt = (mode == DImode) ? 4 : 3;

  if (load_p)
    {
      reg1 = copy_rtx (operands[0]);
      reg2 = copy_rtx (operands[2]);
      mem1 = copy_rtx (operands[1]);
      mem2 = copy_rtx (operands[3]);

      if (mode == SImode)
        if (code == ZERO_EXTEND)
          format = "th.lwud\t%0, %1, (%2), %3, %4";
        else // SIGN_EXTEND or UNKNOWN
          format = "th.lwd\t%0, %1, (%2), %3, %4";
      else
        format = "th.ldd\t%0, %1, (%2), %3, %4";
    }
  else
    {
      reg1 = copy_rtx (operands[1]);
      reg2 = copy_rtx (operands[3]);
      mem1 = copy_rtx (operands[0]);
      mem2 = copy_rtx (operands[2]);

      if (mode == SImode)
        format = "th.swd\t%z0, %z1, (%2), %3, %4";
      else
        format = "th.sdd\t%z0, %z1, (%2), %3, %4";
    }

  split_plus (XEXP (mem1, 0), &base1, &offset1);
  split_plus (XEXP (mem2, 0), &base2, &offset2);
  gcc_assert (rtx_equal_p (base1, base2));
  auto size1 = MEM_SIZE (mem1);
  auto size2 = MEM_SIZE (mem2);
  gcc_assert (known_eq (size1, size2));
  gcc_assert (known_eq (offset1 + size1, offset2));

  HOST_WIDE_INT imm2 = offset1 >> shamt;

  /* Make sure all mempair instruction constraints are met.  */
  gcc_assert (imm2 >= 0 && imm2 < 4);
  gcc_assert ((imm2 << shamt) == offset1);
  gcc_assert (REG_P (reg1));
  gcc_assert (REG_P (reg2));
  gcc_assert (REG_P (base1));
  if (load_p)
    {
      gcc_assert (REGNO (reg1) != REGNO (reg2));
      gcc_assert (REGNO (reg1) != REGNO (base1));
      gcc_assert (REGNO (reg2) != REGNO (base1));
    }

  /* Output the mempair instruction.  */
  output_operands[0] = copy_rtx (reg1);
  output_operands[1] = copy_rtx (reg2);
  output_operands[2] = copy_rtx (base1);
  output_operands[3] = gen_rtx_CONST_INT (mode, imm2);
  output_operands[4] = gen_rtx_CONST_INT (mode, shamt);
  output_asm_insn (format, output_operands);

  return "";
}

/* Analyse if a pair of loads/stores MEM1 and MEM2 with given MODE
   are consecutive so they can be merged into a mempair instruction.
   REVERSED will be set to true if a reversal of the accesses is
   required (false otherwise).  Returns true if the accesses can be
   merged (even if reversing is necessary) and false if not.  */

static bool
th_mempair_check_consecutive_mems (machine_mode mode, rtx *mem1, rtx *mem2,
                                   bool *reversed)
{
  rtx base1, base2, offset1, offset2;
  extract_base_offset_in_addr (*mem1, &base1, &offset1);
  extract_base_offset_in_addr (*mem2, &base2, &offset2);

  /* Make sure both mems are in base+offset form.  */
  if (!base1 || !base2)
    return false;

  /* If both mems use the same base register, just check the offsets.  */
  if (rtx_equal_p (base1, base2))
    {
      auto size = GET_MODE_SIZE (mode);

      if (known_eq (UINTVAL (offset1) + size, UINTVAL (offset2)))
        {
          *reversed = false;
          return true;
        }

      if (known_eq (UINTVAL (offset2) + size, UINTVAL (offset1)))
        {
          *reversed = true;
          return true;
        }

      return false;
    }

  return false;
}

/* Check if the given MEM can be used to define the address of a mempair
   instruction.  */

static bool
th_mempair_operand_p (rtx mem, machine_mode mode)
{
  if (!MEM_SIZE_KNOWN_P (mem))
    return false;

  /* Only DI or SI mempair instructions exist.  */
  gcc_assert (mode == SImode || mode == DImode);
  auto mem_sz = MEM_SIZE (mem);
  auto mode_sz = GET_MODE_SIZE (mode);
  if (!known_eq (mem_sz, mode_sz))
    return false;

  /* Paired 64-bit access instructions have a fixed shift amount of 4.
     Paired 32-bit access instructions have a fixed shift amount of 3.  */
  machine_mode mem_mode = GET_MODE (mem);
  unsigned shamt = (mem_mode == DImode) ? 4 : 3;

  rtx base;
  HOST_WIDE_INT offset;
  split_plus (XEXP (mem, 0), &base, &offset);
  HOST_WIDE_INT imm2 = offset >> shamt;

  if (imm2 < 0 || imm2 >= 4)
    return false;

  if ((imm2 << shamt) != offset)
    return false;

  return true;
}

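/* Return true if the load destinations REG1 and REG2 conflict with each
   other or (possibly) with the address of MEM, in which case the two
   accesses cannot be combined into a single mempair load.  */
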
static bool
th_mempair_load_overlap_p (rtx reg1, rtx reg2, rtx mem)
{
  if (REGNO (reg1) == REGNO (reg2))
    return true;

  if (reg_overlap_mentioned_p (reg1, mem))
    return true;

  rtx base;
  HOST_WIDE_INT offset;
  split_plus (XEXP (mem, 0), &base, &offset);

  if (!REG_P (base))
    return true;

  if (REG_P (base))
    {
      if (REGNO (base) == REGNO (reg1)
          || REGNO (base) == REGNO (reg2))
        return true;
    }

  return false;
}

/* Given OPERANDS of consecutive load/store, check if we can merge
   them into load-pair or store-pair instructions.
   LOAD is true if they are load instructions.
   MODE is the mode of memory operation.  */

bool
th_mempair_operands_p (rtx operands[4], bool load_p,
                       machine_mode mode)
{
  rtx mem_1, mem_2, reg_1, reg_2;

  if (load_p)
    {
      reg_1 = operands[0];
      mem_1 = operands[1];
      reg_2 = operands[2];
      mem_2 = operands[3];
      if (!REG_P (reg_1) || !REG_P (reg_2))
        return false;
      if (th_mempair_load_overlap_p (reg_1, reg_2, mem_1))
        return false;
      if (th_mempair_load_overlap_p (reg_1, reg_2, mem_2))
        return false;
    }
  else
    {
      mem_1 = operands[0];
      reg_1 = operands[1];
      mem_2 = operands[2];
      reg_2 = operands[3];
    }

  /* Check if the registers are GP registers.  */
  if (!REG_P (reg_1) || !GP_REG_P (REGNO (reg_1))
      || !REG_P (reg_2) || !GP_REG_P (REGNO (reg_2)))
    return false;

  /* The mems cannot be volatile.  */
  if (!MEM_P (mem_1) || !MEM_P (mem_2))
    return false;
  if (MEM_VOLATILE_P (mem_1) || MEM_VOLATILE_P (mem_2))
    return false;

  /* If we have slow unaligned access, we only accept aligned memory.  */
  if (riscv_slow_unaligned_access_p
      && known_lt (MEM_ALIGN (mem_1), GET_MODE_SIZE (mode) * BITS_PER_UNIT))
    return false;

  /* Check if the addresses are in the form of [base+offset].  */
  bool reversed = false;
  if (!th_mempair_check_consecutive_mems (mode, &mem_1, &mem_2, &reversed))
    return false;

  /* The first memory access must be a mempair operand.  */
  if ((!reversed && !th_mempair_operand_p (mem_1, mode))
      || (reversed && !th_mempair_operand_p (mem_2, mode)))
    return false;

  /* The operands must be of the same size.  */
  gcc_assert (known_eq (GET_MODE_SIZE (GET_MODE (mem_1)),
                        GET_MODE_SIZE (GET_MODE (mem_2))));

  return true;
}

/* Given OPERANDS of consecutive load/store that can be merged,
   swap them if they are not in ascending order.  */

void
th_mempair_order_operands (rtx operands[4], bool load_p, machine_mode mode)
{
  int mem_op = load_p ? 1 : 0;
  bool reversed = false;
  if (!th_mempair_check_consecutive_mems (mode,
                                          operands + mem_op,
                                          operands + mem_op + 2,
                                          &reversed))
    gcc_unreachable ();

  if (reversed)
    {
      /* Irrespective of whether this is a load or a store,
         we do the same swap.  */
      std::swap (operands[0], operands[2]);
      std::swap (operands[1], operands[3]);
    }
}

/* Similar to riscv_save_reg, but saves two registers to memory
   and marks the resulting instruction as frame-related.  */

static void
th_mempair_save_regs (rtx operands[4])
{
  rtx set1 = gen_rtx_SET (operands[0], operands[1]);
  rtx set2 = gen_rtx_SET (operands[2], operands[3]);
  rtx dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
  rtx insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2)));
  RTX_FRAME_RELATED_P (insn) = 1;

  XVECEXP (dwarf, 0, 0) = copy_rtx (set1);
  XVECEXP (dwarf, 0, 1) = copy_rtx (set2);
  RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
  RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
  add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
}

/* Similar to riscv_restore_reg, but restores two registers from memory
   and marks the instruction frame-related.  */

static void
th_mempair_restore_regs (rtx operands[4])
{
  rtx set1 = gen_rtx_SET (operands[0], operands[1]);
  rtx set2 = gen_rtx_SET (operands[2], operands[3]);
  rtx insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2)));
  RTX_FRAME_RELATED_P (insn) = 1;
  add_reg_note (insn, REG_CFA_RESTORE, operands[0]);
  add_reg_note (insn, REG_CFA_RESTORE, operands[2]);
}

/* Prepare the OPERANDS array to emit a mempair instruction using the
   provided information.  No checks are performed; the resulting array
   should be validated using th_mempair_operands_p().  */

void
th_mempair_prepare_save_restore_operands (rtx operands[4],
                                          bool load_p, machine_mode mode,
                                          int regno, HOST_WIDE_INT offset,
                                          int regno2, HOST_WIDE_INT offset2)
{
  int reg_op = load_p ? 0 : 1;
  int mem_op = load_p ? 1 : 0;

  rtx mem1 = plus_constant (mode, stack_pointer_rtx, offset);
  mem1 = gen_frame_mem (mode, mem1);
  rtx mem2 = plus_constant (mode, stack_pointer_rtx, offset2);
  mem2 = gen_frame_mem (mode, mem2);

  operands[reg_op] = gen_rtx_REG (mode, regno);
  operands[mem_op] = mem1;
  operands[2 + reg_op] = gen_rtx_REG (mode, regno2);
  operands[2 + mem_op] = mem2;
}

/* Emit a mempair instruction to save/restore two registers to/from stack.  */

void
th_mempair_save_restore_regs (rtx operands[4], bool load_p,
                              machine_mode mode)
{
  gcc_assert (th_mempair_operands_p (operands, load_p, mode));

  th_mempair_order_operands (operands, load_p, mode);

  if (load_p)
    th_mempair_restore_regs (operands);
  else
    th_mempair_save_regs (operands);
}

/* Return true if X can be represented as a signed immediate of NBITS bits.
   The immediate is assumed to be shifted left by LSHAMT bits.  */

static bool
valid_signed_immediate (rtx x, unsigned nbits, unsigned lshamt)
{
  if (GET_CODE (x) != CONST_INT)
    return false;

  HOST_WIDE_INT v = INTVAL (x);

  HOST_WIDE_INT vunshifted = v >> lshamt;

  /* Make sure we did not shift out any bits.  */
  if (vunshifted << lshamt != v)
    return false;

  unsigned HOST_WIDE_INT imm_reach = 1LL << nbits;
  return ((unsigned HOST_WIDE_INT) vunshifted + imm_reach/2 < imm_reach);
}

/* Return the address RTX of a move to/from memory
   instruction.  */

static rtx
th_get_move_mem_addr (rtx dest, rtx src, bool load)
{
  rtx mem;

  if (load)
    mem = src;
  else
    mem = dest;

  gcc_assert (GET_CODE (mem) == MEM);
  return XEXP (mem, 0);
}

/* Return true if X is a valid address for T-Head's memory addressing modes
   with pre/post modification for machine mode MODE.
   If it is, fill in INFO appropriately (if non-NULL).
   If STRICT_P is true then REG_OK_STRICT is in effect.  */

static bool
th_memidx_classify_address_modify (struct riscv_address_info *info, rtx x,
                                   machine_mode mode, bool strict_p)
{
  if (!TARGET_XTHEADMEMIDX)
    return false;

  if (!TARGET_64BIT && mode == DImode)
    return false;

  if (!(INTEGRAL_MODE_P (mode) && GET_MODE_SIZE (mode).to_constant () <= 8))
    return false;

  if (GET_CODE (x) != POST_MODIFY
      && GET_CODE (x) != PRE_MODIFY)
    return false;

  rtx reg = XEXP (x, 0);
  rtx exp = XEXP (x, 1);
  rtx expreg = XEXP (exp, 0);
  rtx expoff = XEXP (exp, 1);

  if (GET_CODE (exp) != PLUS
      || !rtx_equal_p (expreg, reg)
      || !CONST_INT_P (expoff)
      || !riscv_valid_base_register_p (reg, mode, strict_p))
    return false;

  /* The offset is calculated as (sign_extend(imm5) << imm2).  */
  const int shamt_bits = 2;
  for (int shamt = 0; shamt < (1 << shamt_bits); shamt++)
    {
      const int nbits = 5;
      if (valid_signed_immediate (expoff, nbits, shamt))
        {
          if (info)
            {
              info->type = ADDRESS_REG_WB;
              info->reg = reg;
              info->offset = expoff;
              info->shift = shamt;
            }
          return true;
        }
    }

  return false;
}

/* Return TRUE if X is a MEM with a legitimate modify address.  */

bool
th_memidx_legitimate_modify_p (rtx x)
{
  if (!MEM_P (x))
    return false;

  /* Get the mode from the MEM and unpack it.  */
  machine_mode mode = GET_MODE (x);
  x = XEXP (x, 0);

  return th_memidx_classify_address_modify (NULL, x, mode, reload_completed);
}

/* Return TRUE if X is a MEM with a legitimate modify address
   and the address is a POST_MODIFY (if POST is true) or a PRE_MODIFY
   (otherwise).  */

bool
th_memidx_legitimate_modify_p (rtx x, bool post)
{
  if (!th_memidx_legitimate_modify_p (x))
    return false;

  /* Unpack the MEM and check the code.  */
  x = XEXP (x, 0);
  if (post)
    return GET_CODE (x) == POST_MODIFY;
  else
    return GET_CODE (x) == PRE_MODIFY;
}

/* Provide a buffer for a th.lXia/th.lXib/th.sXia/th.sXib instruction
   for the given MODE.  If LOAD is true, a load instruction will be
   provided (otherwise, a store instruction).  If X is not suitable,
   return NULL.  */

static const char *
th_memidx_output_modify (rtx dest, rtx src, machine_mode mode, bool load)
{
  char format[24];
  rtx output_operands[2];
  rtx x = th_get_move_mem_addr (dest, src, load);

  /* Validate x.  */
  if (!th_memidx_classify_address_modify (NULL, x, mode, reload_completed))
    return NULL;

  int index = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  bool post = GET_CODE (x) == POST_MODIFY;

  const char *const insn[][4] = {
    {
      "th.sbi%s\t%%z1,%%0",
      "th.shi%s\t%%z1,%%0",
      "th.swi%s\t%%z1,%%0",
      "th.sdi%s\t%%z1,%%0"
    },
    {
      "th.lbui%s\t%%0,%%1",
      "th.lhui%s\t%%0,%%1",
      "th.lwi%s\t%%0,%%1",
      "th.ldi%s\t%%0,%%1"
    }
  };

  snprintf (format, sizeof (format), insn[load][index], post ? "a" : "b");
  output_operands[0] = dest;
  output_operands[1] = src;
  output_asm_insn (format, output_operands);
  return "";
}

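/* Return true if MODE is a mode that can be handled by the XTheadMemIdx
   integer load/store instructions.  */
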
static bool
is_memidx_mode (machine_mode mode)
{
  if (mode == QImode || mode == HImode || mode == SImode)
    return true;

  if (mode == DImode && TARGET_64BIT)
    return true;

  return false;
}

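/* Return true if MODE is a floating-point mode that can be handled by the
   XTheadFMemIdx load/store instructions.  */
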
static bool
is_fmemidx_mode (machine_mode mode)
{
  if (mode == SFmode && TARGET_HARD_FLOAT)
    return true;

  if (mode == DFmode && TARGET_DOUBLE_FLOAT)
    return true;

  return false;
}

/* Return true if X is a valid address for T-Head's memory addressing modes
   with scaled register offsets for machine mode MODE.
   If it is, fill in INFO appropriately (if non-NULL).
   If STRICT_P is true then REG_OK_STRICT is in effect.  */

static bool
th_memidx_classify_address_index (struct riscv_address_info *info, rtx x,
                                  machine_mode mode, bool strict_p)
{
  /* Ensure that the mode is supported.  */
  if (!(TARGET_XTHEADMEMIDX && is_memidx_mode (mode))
      && !(TARGET_XTHEADMEMIDX
           && TARGET_XTHEADFMEMIDX && is_fmemidx_mode (mode)))
    return false;

  if (GET_CODE (x) != PLUS)
    return false;

  rtx reg = XEXP (x, 0);
  enum riscv_address_type type;
  rtx offset = XEXP (x, 1);
  int shift;

  if (!riscv_valid_base_register_p (reg, mode, strict_p))
    return false;

  /* (reg:X) */
  if (REG_P (offset)
      && GET_MODE (offset) == Xmode)
    {
      type = ADDRESS_REG_REG;
      shift = 0;
      offset = offset;
    }
  /* (zero_extend:DI (reg:SI)) */
  else if (GET_CODE (offset) == ZERO_EXTEND
           && GET_MODE (offset) == DImode
           && GET_MODE (XEXP (offset, 0)) == SImode)
    {
      type = ADDRESS_REG_UREG;
      shift = 0;
      offset = XEXP (offset, 0);
    }
  /* (ashift:X (reg:X) (const_int shift)) */
  else if (GET_CODE (offset) == ASHIFT
           && GET_MODE (offset) == Xmode
           && REG_P (XEXP (offset, 0))
           && GET_MODE (XEXP (offset, 0)) == Xmode
           && CONST_INT_P (XEXP (offset, 1))
           && IN_RANGE (INTVAL (XEXP (offset, 1)), 0, 3))
    {
      type = ADDRESS_REG_REG;
      shift = INTVAL (XEXP (offset, 1));
      offset = XEXP (offset, 0);
    }
  /* (ashift:DI (zero_extend:DI (reg:SI)) (const_int shift)) */
  else if (GET_CODE (offset) == ASHIFT
           && GET_MODE (offset) == DImode
           && GET_CODE (XEXP (offset, 0)) == ZERO_EXTEND
           && GET_MODE (XEXP (offset, 0)) == DImode
           && GET_MODE (XEXP (XEXP (offset, 0), 0)) == SImode
           && CONST_INT_P (XEXP (offset, 1))
           && IN_RANGE (INTVAL (XEXP (offset, 1)), 0, 3))
    {
      type = ADDRESS_REG_UREG;
      shift = INTVAL (XEXP (offset, 1));
      offset = XEXP (XEXP (offset, 0), 0);
    }
  else
    return false;

  if (!strict_p && GET_CODE (offset) == SUBREG)
    offset = SUBREG_REG (offset);

  if (!REG_P (offset)
      || !riscv_regno_mode_ok_for_base_p (REGNO (offset), mode, strict_p))
    return false;

  if (info)
    {
      info->reg = reg;
      info->type = type;
      info->offset = offset;
      info->shift = shift;
    }
  return true;
}

/* Return TRUE if X is a MEM with a legitimate indexed address.  */

bool
th_memidx_legitimate_index_p (rtx x)
{
  if (!MEM_P (x))
    return false;

  /* Get the mode from the MEM and unpack it.  */
  machine_mode mode = GET_MODE (x);
  x = XEXP (x, 0);

  return th_memidx_classify_address_index (NULL, x, mode, reload_completed);
}

/* Return TRUE if X is a MEM with a legitimate indexed address
   and the offset register is zero-extended (if UINDEX is true)
   or sign-extended (otherwise).  */

bool
th_memidx_legitimate_index_p (rtx x, bool uindex)
{
  if (!MEM_P (x))
    return false;

  /* Get the mode from the MEM and unpack it.  */
  machine_mode mode = GET_MODE (x);
  x = XEXP (x, 0);

  struct riscv_address_info info;
  if (!th_memidx_classify_address_index (&info, x, mode, reload_completed))
    return false;

  if (uindex)
    return info.type == ADDRESS_REG_UREG;
  else
    return info.type == ADDRESS_REG_REG;
}

/* Provide a buffer for a th.lrX/th.lurX/th.srX/th.surX instruction
   for the given MODE.  If LOAD is true, a load instruction will be
   provided (otherwise, a store instruction).  If X is not suitable,
   return NULL.  */

static const char *
th_memidx_output_index (rtx dest, rtx src, machine_mode mode, bool load)
{
  struct riscv_address_info info;
  char format[24];
  rtx output_operands[2];
  rtx x = th_get_move_mem_addr (dest, src, load);

  /* Validate x.  */
  if (!th_memidx_classify_address_index (&info, x, mode, reload_completed))
    return NULL;

  int index = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  bool uindex = info.type == ADDRESS_REG_UREG;

  const char *const insn[][4] = {
    {
      "th.s%srb\t%%z1,%%0",
      "th.s%srh\t%%z1,%%0",
      "th.s%srw\t%%z1,%%0",
      "th.s%srd\t%%z1,%%0"
    },
    {
      "th.l%srbu\t%%0,%%1",
      "th.l%srhu\t%%0,%%1",
      "th.l%srw\t%%0,%%1",
      "th.l%srd\t%%0,%%1"
    }
  };

  snprintf (format, sizeof (format), insn[load][index], uindex ? "u" : "");
  output_operands[0] = dest;
  output_operands[1] = src;
  output_asm_insn (format, output_operands);
  return "";
}

/* Provide a buffer for a th.flX/th.fluX/th.fsX/th.fsuX instruction
   for the given MODE.  If LOAD is true, a load instruction will be
   provided (otherwise, a store instruction).  If X is not suitable,
   return NULL.  */

static const char *
th_fmemidx_output_index (rtx dest, rtx src, machine_mode mode, bool load)
{
  struct riscv_address_info info;
  char format[24];
  rtx output_operands[2];
  rtx x = th_get_move_mem_addr (dest, src, load);

  /* Validate x.  */
  if (!th_memidx_classify_address_index (&info, x, mode, false))
    return NULL;

  int index = exact_log2 (GET_MODE_SIZE (mode).to_constant ()) - 2;
  bool uindex = info.type == ADDRESS_REG_UREG;

  const char *const insn[][2] = {
    {
      "th.fs%srw\t%%z1,%%0",
      "th.fs%srd\t%%z1,%%0"
    },
    {
      "th.fl%srw\t%%0,%%1",
      "th.fl%srd\t%%0,%%1"
    }
  };

  snprintf (format, sizeof (format), insn[load][index], uindex ? "u" : "");
  output_operands[0] = dest;
  output_operands[1] = src;
  output_asm_insn (format, output_operands);
  return "";
}

/* Return true if X is a valid address for T-Head's memory addressing modes
   for machine mode MODE.  If it is, fill in INFO appropriately (if non-NULL).
   If STRICT_P is true then REG_OK_STRICT is in effect.  */

bool
th_classify_address (struct riscv_address_info *info, rtx x,
                     machine_mode mode, bool strict_p)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      if (th_memidx_classify_address_index (info, x, mode, strict_p))
        return true;
      break;

    case POST_MODIFY:
    case PRE_MODIFY:
      if (th_memidx_classify_address_modify (info, x, mode, strict_p))
        return true;
      break;

    default:
      return false;
    }

  return false;
}

/* Provide a string containing an XTheadMemIdx instruction for the given
   MODE from the provided SRC to the provided DEST.
   A pointer to a NULL-terminated string containing the instruction will
   be returned if a suitable instruction is available.  Otherwise, this
   function returns NULL.  */

const char *
th_output_move (rtx dest, rtx src)
{
  enum rtx_code dest_code, src_code;
  machine_mode mode;
  const char *insn = NULL;

  dest_code = GET_CODE (dest);
  src_code = GET_CODE (src);
  mode = GET_MODE (dest);

  if (!(mode == GET_MODE (src) || src == CONST0_RTX (mode)))
    return NULL;

  if (dest_code == REG && src_code == MEM)
    {
      if (GET_MODE_CLASS (mode) == MODE_INT
          || (GET_MODE_CLASS (mode) == MODE_FLOAT && GP_REG_P (REGNO (dest))))
        {
          if ((insn = th_memidx_output_index (dest, src, mode, true)))
            return insn;
          if ((insn = th_memidx_output_modify (dest, src, mode, true)))
            return insn;
        }
      else if (GET_MODE_CLASS (mode) == MODE_FLOAT && HARDFP_REG_P (REGNO (dest)))
        {
          if ((insn = th_fmemidx_output_index (dest, src, mode, true)))
            return insn;
        }
    }
  else if (dest_code == MEM && (src_code == REG || src == CONST0_RTX (mode)))
    {
      if (GET_MODE_CLASS (mode) == MODE_INT
          || src == CONST0_RTX (mode)
          || (GET_MODE_CLASS (mode) == MODE_FLOAT && GP_REG_P (REGNO (src))))
        {
          if ((insn = th_memidx_output_index (dest, src, mode, false)))
            return insn;
          if ((insn = th_memidx_output_modify (dest, src, mode, false)))
            return insn;
        }
      else if (GET_MODE_CLASS (mode) == MODE_FLOAT && HARDFP_REG_P (REGNO (src)))
        {
          if ((insn = th_fmemidx_output_index (dest, src, mode, false)))
            return insn;
        }
    }
  return NULL;
}

/* Implement TARGET_PRINT_OPERAND_ADDRESS for XTheadMemIdx.  */

bool
th_print_operand_address (FILE *file, machine_mode mode, rtx x)
{
  struct riscv_address_info addr;

  if (!th_classify_address (&addr, x, mode, reload_completed))
    return false;

  switch (addr.type)
    {
    case ADDRESS_REG_REG:
    case ADDRESS_REG_UREG:
      fprintf (file, "%s,%s,%u", reg_names[REGNO (addr.reg)],
               reg_names[REGNO (addr.offset)], addr.shift);
      return true;

    case ADDRESS_REG_WB:
      fprintf (file, "(%s),%ld,%u", reg_names[REGNO (addr.reg)],
               INTVAL (addr.offset) >> addr.shift, addr.shift);
      return true;

    default:
      gcc_unreachable ();
    }

  gcc_unreachable ();
}