gcc/lra.cc — GCC Local Register Allocator (LRA) driver (captured from a git blame view; inline blame annotations remain).
55a2c322 1/* LRA (local register allocator) driver and LRA utilities.
a945c346 2 Copyright (C) 2010-2024 Free Software Foundation, Inc.
55a2c322
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22/* The Local Register Allocator (LRA) is a replacement of former
23 reload pass. It is focused to simplify code solving the reload
24 pass tasks, to make the code maintenance easier, and to implement new
25 perspective optimizations.
26
27 The major LRA design solutions are:
28 o division small manageable, separated sub-tasks
 29 o reflection of all transformations and decisions in RTL as much
 30 as possible
31 o insn constraints as a primary source of the info (minimizing
32 number of target-depended macros/hooks)
33
34 In brief LRA works by iterative insn process with the final goal is
35 to satisfy all insn and address constraints:
36 o New reload insns (in brief reloads) and reload pseudos might be
37 generated;
38 o Some pseudos might be spilled to assign hard registers to
39 new reload pseudos;
d9cf932c 40 o Recalculating spilled pseudo values (rematerialization);
55a2c322
VM
41 o Changing spilled pseudos to stack memory or their equivalences;
42 o Allocation stack memory changes the address displacement and
43 new iteration is needed.
44
45 Here is block diagram of LRA passes:
46
2b778c9d
VM
47 ------------------------
48 --------------- | Undo inheritance for | ---------------
49 | Memory-memory | | spilled pseudos, | | New (and old) |
50 | move coalesce |<---| splits for pseudos got |<-- | pseudos |
51 --------------- | the same hard regs, | | assignment |
52 Start | | and optional reloads | ---------------
53 | | ------------------------ ^
f38e1b0a
VM
54 V | ---------------- |
55 ----------- V | Update virtual | |
56| Remove |----> ------------>| register | |
57| scratches | ^ | displacements | |
58 ----------- | ---------------- |
59 | | |
60 | V New |
d9cf932c
VM
61 | ------------ pseudos -------------------
62 | |Constraints:| or insns | Inheritance/split |
63 | | RTL |--------->| transformations |
64 | | transfor- | | in EBB scope |
65 | substi- | mations | -------------------
66 | tutions ------------
67 | | No change
68 ---------------- V
69 | Spilled pseudo | -------------------
70 | to memory |<----| Rematerialization |
71 | substitution | -------------------
72 ----------------
 73 | No substitutions
74 V
75 -------------------------
76 | Hard regs substitution, |
 77 | devirtualization, and |------> Finish
78 | restoring scratches got |
79 | memory |
80 -------------------------
55a2c322
VM
81
82 To speed up the process:
83 o We process only insns affected by changes on previous
84 iterations;
85 o We don't use DFA-infrastructure because it results in much slower
86 compiler speed than a special IR described below does;
87 o We use a special insn representation for quick access to insn
88 info which is always *synchronized* with the current RTL;
89 o Insn IR is minimized by memory. It is divided on three parts:
90 o one specific for each insn in RTL (only operand locations);
91 o one common for all insns in RTL with the same insn code
92 (different operand attributes from machine descriptions);
93 o one oriented for maintenance of live info (list of pseudos).
94 o Pseudo data:
95 o all insns where the pseudo is referenced;
96 o live info (conflicting hard regs, live ranges, # of
97 references etc);
98 o data used for assigning (preferred hard regs, costs etc).
99
100 This file contains LRA driver, LRA utility functions and data, and
101 code for dealing with scratches. */
102
103#include "config.h"
104#include "system.h"
105#include "coretypes.h"
c7131fb2 106#include "backend.h"
957060b5 107#include "target.h"
55a2c322 108#include "rtl.h"
b175b488 109#include "rtl-error.h"
957060b5
AM
110#include "tree.h"
111#include "predict.h"
c7131fb2 112#include "df.h"
4d0cdd0c 113#include "memmodel.h"
55a2c322 114#include "tm_p.h"
957060b5 115#include "optabs.h"
55a2c322 116#include "regs.h"
957060b5 117#include "ira.h"
55a2c322 118#include "recog.h"
55a2c322 119#include "expr.h"
60393bbc
AM
120#include "cfgrtl.h"
121#include "cfgbuild.h"
c7131fb2 122#include "lra.h"
55a2c322 123#include "lra-int.h"
013a8899 124#include "print-rtl.h"
a1e6ee38 125#include "function-abi.h"
55a2c322 126
8160cd3e
VM
/* Dump bitmap SET with TITLE and BB INDEX to lra_dump_file.  Members
   are printed at most MAX_NUMS_ON_LINE per line.  Prints nothing for
   an empty bitmap.  */
void
lra_dump_bitmap_with_title (const char *title, bitmap set, int index)
{
  unsigned int i;
  int count;
  bitmap_iterator bi;
  static const int max_nums_on_line = 10;

  if (bitmap_empty_p (set))
    return;
  fprintf (lra_dump_file, " %s %d:", title, index);
  fprintf (lra_dump_file, "\n");
  /* Start past the limit so the first member forces a fresh,
     indented line.  */
  count = max_nums_on_line + 1;
  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      if (count > max_nums_on_line)
	{
	  fprintf (lra_dump_file, "\n ");
	  count = 0;
	}
      fprintf (lra_dump_file, " %4u", i);
      count++;
    }
  fprintf (lra_dump_file, "\n");
}
153
55a2c322
VM
/* Hard registers currently not available for allocation.  It can
   changed after some hard registers become not eliminable.  */
HARD_REG_SET lra_no_alloc_regs;

/* Forward declarations of helpers defined later in this file.  */
static int get_new_reg_value (void);
static void expand_reg_info (void);
static void invalidate_insn_recog_data (int);
static int get_insn_freq (rtx_insn *);
static void invalidate_insn_data_regno_info (lra_insn_recog_data_t,
					     rtx_insn *, int);
55a2c322
VM
/* Expand all regno related info needed for LRA.  OLD is the previous
   max_reg_num (); every pseudo created since then gets ALL_REGS as
   its initial allocno class.  */
static void
expand_reg_data (int old)
{
  resize_reg_info ();
  expand_reg_info ();
  ira_expand_reg_equiv ();
  /* Walk only the regnos added since OLD.  */
  for (int i = (int) max_reg_num () - 1; i >= old; i--)
    lra_change_class (i, ALL_REGS, " Set", true);
}
174
/* Create and return a new reg of ORIGINAL mode.  If ORIGINAL is NULL
   or of VOIDmode, use MD_MODE for the new reg.  Initialize its
   register class to RCLASS.  Print message about assigning class
   RCLASS containing new register name TITLE unless it is NULL.  Use
   attributes of ORIGINAL if it is a register.  The created register
   will have unique held value.  EXCLUDE_START_HARD_REGS, if non-NULL,
   gives hard regs that must not start the new pseudo's allocation.  */
rtx
lra_create_new_reg_with_unique_value (machine_mode md_mode, rtx original,
				      enum reg_class rclass,
				      HARD_REG_SET *exclude_start_hard_regs,
				      const char *title)
{
  machine_mode mode;
  rtx new_reg;

  if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
    mode = md_mode;
  lra_assert (mode != VOIDmode);
  new_reg = gen_reg_rtx (mode);
  if (original == NULL_RTX || ! REG_P (original))
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Creating newreg=%i", REGNO (new_reg));
    }
  else
    {
      /* Copy the user-visible attributes of the reloaded register so
	 debug info and pointer analysis stay accurate.  */
      if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
	ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
      REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
      REG_POINTER (new_reg) = REG_POINTER (original);
      REG_ATTRS (new_reg) = REG_ATTRS (original);
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Creating newreg=%i from oldreg=%i",
		 REGNO (new_reg), REGNO (original));
    }
  if (lra_dump_file != NULL)
    {
      if (title != NULL)
	fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
		 reg_class_names[rclass], *title == '\0' ? "" : " ",
		 title, REGNO (new_reg));
      fprintf (lra_dump_file, "\n");
    }
  /* The new pseudo was just created, so grow all regno-indexed data.  */
  expand_reg_data (max_reg_num ());
  setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
  if (exclude_start_hard_regs != NULL)
    lra_reg_info[REGNO (new_reg)].exclude_start_hard_regs
      = *exclude_start_hard_regs;
  return new_reg;
}
225
226/* Analogous to the previous function but also inherits value of
227 ORIGINAL. */
228rtx
85419ac5
VM
229lra_create_new_reg (machine_mode md_mode, rtx original, enum reg_class rclass,
230 HARD_REG_SET *exclude_start_hard_regs, const char *title)
55a2c322
VM
231{
232 rtx new_reg;
233
234 new_reg
85419ac5
VM
235 = lra_create_new_reg_with_unique_value (md_mode, original, rclass,
236 exclude_start_hard_regs, title);
55a2c322 237 if (original != NULL_RTX && REG_P (original))
d70a81dd 238 lra_assign_reg_val (REGNO (original), REGNO (new_reg));
55a2c322
VM
239 return new_reg;
240}
241
/* Set up for REGNO unique hold value, i.e. make REGNO share a value
   with no other pseudo.  */
void
lra_set_regno_unique_value (int regno)
{
  lra_reg_info[regno].val = get_new_reg_value ();
}
248
/* Invalidate INSN related info used by LRA.  The info should never be
   used after that.  */
void
lra_invalidate_insn_data (rtx_insn *insn)
{
  /* Drop both the regno-reference info and the cached recog data.  */
  lra_invalidate_insn_regno_info (insn);
  invalidate_insn_recog_data (INSN_UID (insn));
}
257
/* Mark INSN deleted and invalidate the insn related info used by
   LRA.  */
void
lra_set_insn_deleted (rtx_insn *insn)
{
  /* Invalidate first: the info must not survive the deletion.  */
  lra_invalidate_insn_data (insn);
  SET_INSN_DELETED (insn);
}
266
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
void
lra_delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn,
     delete it too.  Recurses so a whole chain of feeding loads is
     removed.  Requires a plain SET with a REG destination and a
     side-effect-free source.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    lra_delete_dead_insn (prev);

  lra_set_insn_deleted (insn);
}
286
/* Emit insn x = y + z.  Return NULL if we failed to do it.
   Otherwise, return the insn.  We don't use gen_add3_insn as it might
   clobber CC.  */
static rtx_insn *
emit_add3_insn (rtx x, rtx y, rtx z)
{
  rtx_insn *last;

  /* Remember the emit position so a failed attempt can be undone.  */
  last = get_last_insn ();

  if (have_addptr3_insn (x, y, z))
    {
      rtx_insn *insn = gen_addptr3_insn (x, y, z);

      /* If the target provides an "addptr" pattern it hopefully does
	 for a reason.  So falling back to the normal add would be
	 a bug.  */
      lra_assert (insn != NULL_RTX);
      emit_insn (insn);
      return insn;
    }

  /* Emit a raw (set x (plus y z)) and let recog decide whether the
     target accepts it.  */
  rtx_insn *insn = emit_insn (gen_rtx_SET (x, gen_rtx_PLUS (GET_MODE (y),
							    y, z)));
  if (recog_memoized (insn) < 0)
    {
      /* Not recognized -- roll back everything emitted above.  */
      delete_insns_since (last);
      insn = NULL;
    }
  return insn;
}
318
319/* Emit insn x = x + y. Return the insn. We use gen_add2_insn as the
320 last resort. */
e67d1102 321static rtx_insn *
11f5b71a
VM
322emit_add2_insn (rtx x, rtx y)
323{
e67d1102 324 rtx_insn *insn = emit_add3_insn (x, x, y);
11f5b71a
VM
325 if (insn == NULL_RTX)
326 {
327 insn = gen_add2_insn (x, y);
328 if (insn != NULL_RTX)
329 emit_insn (insn);
330 }
331 return insn;
332}
333
/* Target checks operands through operand predicates to recognize an
   insn.  We should have a special precaution to generate add insns
   which are frequent results of elimination.

   Emit insns for x = y + z.  X can be used to store intermediate
   values and should be not in Y and Z when we use X to store an
   intermediate value.  Y + Z should form [base] [+ index[ * scale]] [
   + disp] where base and index are registers, disp and scale are
   constants.  Y should contain base if it is present, Z should
   contain disp if any.  index[*scale] can be part of Y or Z.  */
void
lra_emit_add (rtx x, rtx y, rtx z)
{
  int old;
  rtx_insn *last;
  rtx a1, a2, base, index, disp, scale, index_scale;
  bool ok_p;

  /* Fast path: a recognized 3-operand add does everything.  */
  rtx_insn *add3_insn = emit_add3_insn (x, y, z);
  old = max_reg_num ();
  if (add3_insn != NULL)
    ;
  else
    {
      /* Split Y + Z into two addends A1/A2 and a constant DISP.  */
      disp = a2 = NULL_RTX;
      if (GET_CODE (y) == PLUS)
	{
	  a1 = XEXP (y, 0);
	  a2 = XEXP (y, 1);
	  disp = z;
	}
      else
	{
	  a1 = y;
	  if (CONSTANT_P (z))
	    disp = z;
	  else
	    a2 = z;
	}
      /* Classify the addends into BASE and INDEX [* SCALE].  */
      index_scale = scale = NULL_RTX;
      if (GET_CODE (a1) == MULT)
	{
	  index_scale = a1;
	  index = XEXP (a1, 0);
	  scale = XEXP (a1, 1);
	  base = a2;
	}
      else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
	{
	  index_scale = a2;
	  index = XEXP (a2, 0);
	  scale = XEXP (a2, 1);
	  base = a1;
	}
      else
	{
	  base = a1;
	  index = a2;
	}
      /* If the pieces do not look like a canonical address, fall
	 back to a plain move + 2-operand add.  */
      if ((base != NULL_RTX && ! (REG_P (base) || GET_CODE (base) == SUBREG))
	  || (index != NULL_RTX
	      && ! (REG_P (index) || GET_CODE (index) == SUBREG))
	  || (disp != NULL_RTX && ! CONSTANT_P (disp))
	  || (scale != NULL_RTX && ! CONSTANT_P (scale)))
	{
	  /* Probably we have no 3 op add.  Last chance is to use 2-op
	     add insn.  To succeed, don't move Z to X as an address
	     segment always comes in Y.  Otherwise, we might fail when
	     adding the address segment to register.  */
	  lra_assert (x != y && x != z);
	  emit_move_insn (x, y);
	  rtx_insn *insn = emit_add2_insn (x, z);
	  lra_assert (insn != NULL_RTX);
	}
      else
	{
	  if (index_scale == NULL_RTX)
	    index_scale = index;
	  if (disp == NULL_RTX)
	    {
	      /* Generate x = index_scale; x = x + base.  */
	      lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
	      emit_move_insn (x, index_scale);
	      rtx_insn *insn = emit_add2_insn (x, base);
	      lra_assert (insn != NULL_RTX);
	    }
	  else if (scale == NULL_RTX)
	    {
	      /* Try x = base + disp.  */
	      lra_assert (base != NULL_RTX);
	      last = get_last_insn ();
	      rtx_insn *move_insn =
		emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base), base, disp));
	      if (recog_memoized (move_insn) < 0)
		{
		  /* Target rejected it -- undo and do it in two
		     steps instead.  */
		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base.  */
		  emit_move_insn (x, disp);
		  rtx_insn *add2_insn = emit_add2_insn (x, base);
		  lra_assert (add2_insn != NULL_RTX);
		}
	      /* Generate x = x + index.  */
	      if (index != NULL_RTX)
		{
		  rtx_insn *insn = emit_add2_insn (x, index);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	  else
	    {
	      /* Try x = index_scale; x = x + disp; x = x + base.  */
	      last = get_last_insn ();
	      rtx_insn *move_insn = emit_move_insn (x, index_scale);
	      ok_p = false;
	      if (recog_memoized (move_insn) >= 0)
		{
		  rtx_insn *insn = emit_add2_insn (x, disp);
		  if (insn != NULL_RTX)
		    {
		      if (base == NULL_RTX)
			ok_p = true;
		      else
			{
			  insn = emit_add2_insn (x, base);
			  if (insn != NULL_RTX)
			    ok_p = true;
			}
		    }
		}
	      if (! ok_p)
		{
		  rtx_insn *insn;

		  /* Undo the failed sequence and use the reversed
		     order, which avoids moving index_scale first.  */
		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base; x = x + index_scale.  */
		  emit_move_insn (x, disp);
		  if (base != NULL_RTX)
		    {
		      insn = emit_add2_insn (x, base);
		      lra_assert (insn != NULL_RTX);
		    }
		  insn = emit_add2_insn (x, index_scale);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	}
    }
  /* Functions emit_... can create pseudos -- so expand the pseudo
     data.  */
  if (old != max_reg_num ())
    expand_reg_data (old);
}
486
/* The number of emitted reload insns so far.  */
int lra_curr_reload_num;

/* Defined later in this file; needed by lra_emit_move to materialize
   scratch operands.  */
static void remove_insn_scratches (rtx_insn *insn);
491
55a2c322
VM
492/* Emit x := y, processing special case when y = u + v or y = u + v *
493 scale + w through emit_add (Y can be an address which is base +
494 index reg * scale + displacement in general case). X may be used
495 as intermediate result therefore it should be not in Y. */
496void
497lra_emit_move (rtx x, rtx y)
498{
499 int old;
5261cf8c
VM
500 rtx_insn *insn;
501
55a2c322
VM
502 if (GET_CODE (y) != PLUS)
503 {
504 if (rtx_equal_p (x, y))
505 return;
506 old = max_reg_num ();
5261cf8c
VM
507
508 insn = (GET_CODE (x) != STRICT_LOW_PART
509 ? emit_move_insn (x, y) : emit_insn (gen_rtx_SET (x, y)));
2713e5db
PB
510 /* The move pattern may require scratch registers, so convert them
511 into real registers now. */
512 if (insn != NULL_RTX)
44fbc9c6 513 remove_insn_scratches (insn);
55a2c322
VM
514 if (REG_P (x))
515 lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
516 /* Function emit_move can create pseudos -- so expand the pseudo
517 data. */
518 if (old != max_reg_num ())
a2d0d374 519 expand_reg_data (old);
55a2c322
VM
520 return;
521 }
522 lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
523}
524
525/* Update insn operands which are duplication of operands whose
526 numbers are in array of NOPS (with end marker -1). The insn is
527 represented by its LRA internal representation ID. */
528void
529lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
530{
531 int i, j, nop;
532 struct lra_static_insn_data *static_id = id->insn_static_data;
533
534 for (i = 0; i < static_id->n_dups; i++)
535 for (j = 0; (nop = nops[j]) >= 0; j++)
536 if (static_id->dup_num[i] == nop)
537 *id->dup_loc[i] = *id->operand_loc[nop];
538}
539
/* Report asm insn error and modify the asm insn so LRA can make
   progress past it.  */
void
lra_asm_insn_error (rtx_insn *insn)
{
  lra_asm_error_p = true;
  error_for_asm (insn,
		 "%<asm%> operand has impossible constraints"
		 " or there are not enough registers");
  /* Avoid further trouble with this insn.  */
  if (JUMP_P (insn))
    {
      /* An asm goto cannot simply be deleted without breaking the
	 CFG, so nullify it instead.  */
      ira_nullify_asm_goto (insn);
      lra_update_insn_regno_info (insn);
    }
  else
    {
      /* Turn the insn into a harmless no-op and delete it.  */
      PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
      lra_set_insn_deleted (insn);
    }
}
560
55a2c322
VM
561\f
562
563/* This page contains code dealing with info about registers in the
564 insns. */
565
/* Pool for allocating struct lra_insn_reg nodes (see new_insn_reg).  */
object_allocator<lra_insn_reg> lra_insn_reg_pool ("insn regs");
55a2c322 568
/* Create LRA insn related info about a reference to REGNO in INSN
   with TYPE (in/out/inout), biggest reference mode MODE, flag that it
   is reference through subreg (SUBREG_P), and reference to the next
   insn reg info (NEXT).  If REGNO can be early clobbered,
   alternatives in which it can be early clobbered are given by
   EARLY_CLOBBER_ALTS.  */
static struct lra_insn_reg *
new_insn_reg (rtx_insn *insn, int regno, enum op_type type,
	      machine_mode mode, bool subreg_p,
	      alternative_mask early_clobber_alts,
	      struct lra_insn_reg *next)
{
  lra_insn_reg *ir = lra_insn_reg_pool.allocate ();
  ir->type = type;
  ir->biggest_mode = mode;
  /* Only real insns update the per-regno biggest mode: debug insns
     must not influence code generation.  */
  if (NONDEBUG_INSN_P (insn))
    lra_update_biggest_mode (regno, mode);
  ir->subreg_p = subreg_p;
  ir->early_clobber_alts = early_clobber_alts;
  ir->regno = regno;
  ir->next = next;
  return ir;
}
592
55a2c322
VM
593/* Free insn reg info list IR. */
594static void
595free_insn_regs (struct lra_insn_reg *ir)
596{
597 struct lra_insn_reg *next_ir;
598
599 for (; ir != NULL; ir = next_ir)
600 {
601 next_ir = ir->next;
fb0b2914 602 lra_insn_reg_pool.remove (ir);
55a2c322
VM
603 }
604}
605
/* Finish pool for insn reg info, releasing all nodes at once.  */
static void
finish_insn_regs (void)
{
  lra_insn_reg_pool.release ();
}
612
613\f
614
615/* This page contains code dealing LRA insn info (or in other words
616 LRA internal insn representation). */
617
55a2c322
VM
/* Map INSN_CODE -> the static insn data.  This info is valid during
   all translation unit.  */
struct lra_static_insn_data *insn_code_data[NUM_INSN_CODES];

/* Debug insns are represented as a special insn with one input
   operand which is RTL expression in var_location.  */

/* The following data are used as static insn operand data for all
   debug insns.  If structure lra_operand_data is changed, the
   initializer should be changed too.  */
static struct lra_operand_data debug_operand_data =
  {
    NULL, /* alternative  */
    0, /* early_clobber_alts */
    E_VOIDmode, /* We are not interested in the operand mode.  */
    OP_IN,
    0, 0, 0
  };

/* The following data are used as static insn data for all debug
   bind insns.  If structure lra_static_insn_data is changed, the
   initializer should be changed too.  */
static struct lra_static_insn_data debug_bind_static_data =
  {
    &debug_operand_data,
    0, /* Duplication operands #.  */
    -1, /* Commutative operand #.  */
    1, /* Operands #.  There is only one operand which is debug RTL
	  expression.  */
    0, /* Duplications #.  */
    0, /* Alternatives #.  We are not interested in alternatives
	  because we do not process debug_insns for reloads.  */
    NULL, /* Hard registers referenced in machine description.  */
    NULL /* Descriptions of operands in alternatives.  */
  };

/* The following data are used as static insn data for all debug
   marker insns.  If structure lra_static_insn_data is changed, the
   initializer should be changed too.  */
static struct lra_static_insn_data debug_marker_static_data =
  {
    &debug_operand_data,
    0, /* Duplication operands #.  */
    -1, /* Commutative operand #.  */
    0, /* Operands #.  There isn't any operand.  */
    0, /* Duplications #.  */
    0, /* Alternatives #.  We are not interested in alternatives
	  because we do not process debug_insns for reloads.  */
    NULL, /* Hard registers referenced in machine description.  */
    NULL /* Descriptions of operands in alternatives.  */
  };
669
55a2c322
VM
670/* Called once per compiler work to initialize some LRA data related
671 to insns. */
672static void
673init_insn_code_data_once (void)
674{
675 memset (insn_code_data, 0, sizeof (insn_code_data));
55a2c322
VM
676}
677
678/* Called once per compiler work to finalize some LRA data related to
679 insns. */
680static void
681finish_insn_code_data_once (void)
682{
523ba738 683 for (unsigned int i = 0; i < NUM_INSN_CODES; i++)
55a2c322
VM
684 {
685 if (insn_code_data[i] != NULL)
3809bcd6
DM
686 {
687 free (insn_code_data[i]);
688 insn_code_data[i] = NULL;
689 }
55a2c322
VM
690 }
691}
692
/* Return static insn data, allocate and setup if necessary.  Although
   dup_num is static data (it depends only on icode), to set it up we
   need to extract insn first.  So recog_data should be valid for
   normal insn (ICODE >= 0) before the call.  */
static struct lra_static_insn_data *
get_static_insn_data (int icode, int nop, int ndup, int nalt)
{
  struct lra_static_insn_data *data;
  size_t n_bytes;

  lra_assert (icode < (int) NUM_INSN_CODES);
  if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
    return data;
  lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
  /* A single allocation holds the header, NOP operand descriptors
     and NDUP dup numbers laid out back to back.  */
  n_bytes = sizeof (struct lra_static_insn_data)
    + sizeof (struct lra_operand_data) * nop
    + sizeof (int) * ndup;
  data = XNEWVAR (struct lra_static_insn_data, n_bytes);
  data->operand_alternative = NULL;
  data->n_operands = nop;
  data->n_dups = ndup;
  data->n_alternatives = nalt;
  /* Point the sub-arrays into the tail of the block allocated above.  */
  data->operand = ((struct lra_operand_data *)
		   ((char *) data + sizeof (struct lra_static_insn_data)));
  data->dup_num = ((int *) ((char *) data->operand
			    + sizeof (struct lra_operand_data) * nop));
  if (icode >= 0)
    {
      int i;

      /* Cache by insn code and copy operand info from the machine
	 description.  */
      insn_code_data[icode] = data;
      for (i = 0; i < nop; i++)
	{
	  data->operand[i].constraint
	    = insn_data[icode].operand[i].constraint;
	  data->operand[i].mode = insn_data[icode].operand[i].mode;
	  data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
	  data->operand[i].is_operator
	    = insn_data[icode].operand[i].is_operator;
	  /* The leading constraint character encodes the direction:
	     '=' output, '+' in/out, otherwise input.  */
	  data->operand[i].type
	    = (data->operand[i].constraint[0] == '=' ? OP_OUT
	       : data->operand[i].constraint[0] == '+' ? OP_INOUT
	       : OP_IN);
	  data->operand[i].is_address = false;
	}
      for (i = 0; i < ndup; i++)
	data->dup_num[i] = recog_data.dup_num[i];
    }
  return data;
}
743
/* The current length of the following array.  */
int lra_insn_recog_data_len;

/* Map INSN_UID -> the insn recog data (NULL if unknown).  */
lra_insn_recog_data_t *lra_insn_recog_data;

/* Alloc pool we allocate entries for lra_insn_recog_data from.  */
static object_allocator<class lra_insn_recog_data>
  lra_insn_recog_data_pool ("insn recog data pool");
753
/* Initialize LRA data about insns.  The map starts empty and grows
   on demand (see check_and_expand_insn_recog_data).  */
static void
init_insn_recog_data (void)
{
  lra_insn_recog_data_len = 0;
  lra_insn_recog_data = NULL;
}
761
/* Expand, if necessary, LRA data about insns so that INDEX is a
   valid subscript of lra_insn_recog_data.  */
static void
check_and_expand_insn_recog_data (int index)
{
  int i, old;

  if (lra_insn_recog_data_len > index)
    return;
  old = lra_insn_recog_data_len;
  /* Grow by 1.5x; the 3U makes the multiply unsigned, avoiding
     signed-overflow UB for large INDEX, with the following check
     guaranteeing at least INDEX + 1 entries.  */
  lra_insn_recog_data_len = index * 3U / 2;
  if (lra_insn_recog_data_len <= index)
    lra_insn_recog_data_len = index + 1;
  lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
				    lra_insn_recog_data,
				    lra_insn_recog_data_len);
  /* Newly added slots have no recog data yet.  */
  for (i = old; i < lra_insn_recog_data_len; i++)
    lra_insn_recog_data[i] = NULL;
}
780
781/* Finish LRA DATA about insn. */
782static void
783free_insn_recog_data (lra_insn_recog_data_t data)
784{
785 if (data->operand_loc != NULL)
786 free (data->operand_loc);
787 if (data->dup_loc != NULL)
788 free (data->dup_loc);
789 if (data->arg_hard_regs != NULL)
790 free (data->arg_hard_regs);
55a2c322
VM
791 if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
792 {
793 if (data->insn_static_data->operand_alternative != NULL)
0c331756
RS
794 free (const_cast <operand_alternative *>
795 (data->insn_static_data->operand_alternative));
55a2c322
VM
796 free_insn_regs (data->insn_static_data->hard_regs);
797 free (data->insn_static_data);
798 }
799 free_insn_regs (data->regs);
800 data->regs = NULL;
033bd26e 801 lra_insn_recog_data_pool.remove (data);
55a2c322
VM
802}
803
/* Pool for allocating lra_copy nodes.  */
static object_allocator<lra_copy> lra_copy_pool ("lra copies");
fb0b2914 806
/* Finish LRA data about all insns.  */
static void
finish_insn_recog_data (void)
{
  int i;
  lra_insn_recog_data_t data;

  for (i = 0; i < lra_insn_recog_data_len; i++)
    if ((data = lra_insn_recog_data[i]) != NULL)
      free_insn_recog_data (data);
  finish_insn_regs ();
  /* Release the backing pools only after the per-insn data that
     points into them has been freed.  */
  lra_copy_pool.release ();
  lra_insn_reg_pool.release ();
  lra_insn_recog_data_pool.release ();
  free (lra_insn_recog_data);
}
823
/* Setup info about operands in alternatives of LRA DATA of insn.
   OP_ALT points at nalt * nop preprocessed alternative entries, in
   alternative-major order.  */
static void
setup_operand_alternative (lra_insn_recog_data_t data,
			   const operand_alternative *op_alt)
{
  int i, j, nop, nalt;
  int icode = data->icode;
  struct lra_static_insn_data *static_data = data->insn_static_data;

  static_data->commutative = -1;
  nop = static_data->n_operands;
  nalt = static_data->n_alternatives;
  static_data->operand_alternative = op_alt;
  for (i = 0; i < nop; i++)
    {
      static_data->operand[i].early_clobber_alts = 0;
      static_data->operand[i].is_address = false;
      if (static_data->operand[i].constraint[0] == '%')
	{
	  /* We currently only support one commutative pair of operands.  */
	  if (static_data->commutative < 0)
	    static_data->commutative = i;
	  else
	    lra_assert (icode < 0); /* Asm  */
	  /* The last operand should not be marked commutative.  */
	  lra_assert (i != nop - 1);
	}
    }
  /* Accumulate per-operand flags over all alternatives.  */
  for (j = 0; j < nalt; j++)
    for (i = 0; i < nop; i++, op_alt++)
      {
	if (op_alt->earlyclobber)
	  static_data->operand[i].early_clobber_alts
	    |= (alternative_mask) 1 << j;
	static_data->operand[i].is_address |= op_alt->is_address;
      }
}
860
/* Recursively process X and collect info about registers, which are
   not the insn operands, in X with TYPE (in/out/inout) and flag that
   it is early clobbered in the insn (EARLY_CLOBBER) and add the info
   to LIST.  X is a part of insn given by DATA.  Return the result
   list.  */
static struct lra_insn_reg *
collect_non_operand_hard_regs (rtx_insn *insn, rtx *x,
			       lra_insn_recog_data_t data,
			       struct lra_insn_reg *list,
			       enum op_type type, bool early_clobber)
{
  int i, j, regno, last;
  bool subreg_p;
  machine_mode mode;
  struct lra_insn_reg *curr;
  rtx op = *x;
  enum rtx_code code = GET_CODE (op);
  const char *fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < data->insn_static_data->n_operands; i++)
    if (! data->insn_static_data->operand[i].is_operator
	&& x == data->operand_loc[i])
      /* It is an operand loc.  Stop here.  */
      return list;
  for (i = 0; i < data->insn_static_data->n_dups; i++)
    if (x == data->dup_loc[i])
      /* It is a dup loc.  Stop here.  */
      return list;
  mode = GET_MODE (op);
  subreg_p = false;
  if (code == SUBREG)
    {
      /* Record the wider of inner/outer modes and look through the
	 subreg to the underlying reg.  */
      mode = wider_subreg_mode (op);
      if (read_modify_subreg_p (op))
	subreg_p = true;
      op = SUBREG_REG (op);
      code = GET_CODE (op);
    }
  if (REG_P (op))
    {
      if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
	return list;
      /* Process all regs even unallocatable ones as we need info
	 about all regs for rematerialization pass.  */
      for (last = end_hard_regno (mode, regno); regno < last; regno++)
	{
	  /* Try to merge into an existing record with the same regno,
	     subreg flag and mode.  */
	  for (curr = list; curr != NULL; curr = curr->next)
	    if (curr->regno == regno && curr->subreg_p == subreg_p
		&& curr->biggest_mode == mode)
	      {
		if (curr->type != type)
		  curr->type = OP_INOUT;
		if (early_clobber)
		  curr->early_clobber_alts = ALL_ALTERNATIVES;
		break;
	      }
	  if (curr == NULL)
	    {
	      /* This is a new hard regno or the info cannot be
		 integrated into the found structure.  */
#ifdef STACK_REGS
	      early_clobber
		= (early_clobber
		   /* This clobber is to inform popping floating
		      point stack only.  */
		   && ! (FIRST_STACK_REG <= regno
			 && regno <= LAST_STACK_REG));
#endif
	      list = new_insn_reg (data->insn, regno, type, mode, subreg_p,
				   early_clobber ? ALL_ALTERNATIVES : 0, list);
	    }
	}
      return list;
    }
  /* Not a reg: recurse into sub-expressions with the op type implied
     by the enclosing rtx code.  */
  switch (code)
    {
    case SET:
      list = collect_non_operand_hard_regs (insn, &SET_DEST (op), data,
					    list, OP_OUT, false);
      list = collect_non_operand_hard_regs (insn, &SET_SRC (op), data,
					    list, OP_IN, false);
      break;
    case CLOBBER:
      /* We treat clobber of non-operand hard registers as early
	 clobber.  */
      list = collect_non_operand_hard_regs (insn, &XEXP (op, 0), data,
					    list, OP_OUT, true);
      break;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
      list = collect_non_operand_hard_regs (insn, &XEXP (op, 0), data,
					    list, OP_INOUT, false);
      break;
    case PRE_MODIFY: case POST_MODIFY:
      list = collect_non_operand_hard_regs (insn, &XEXP (op, 0), data,
					    list, OP_INOUT, false);
      list = collect_non_operand_hard_regs (insn, &XEXP (op, 1), data,
					    list, OP_IN, false);
      break;
    default:
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    list = collect_non_operand_hard_regs (insn, &XEXP (op, i), data,
						  list, OP_IN, false);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	      list = collect_non_operand_hard_regs (insn, &XVECEXP (op, i, j),
						    data, list, OP_IN, false);
	}
    }
  return list;
}
973
974/* Set up and return info about INSN. Set up the info if it is not set up
975 yet. */
976lra_insn_recog_data_t
cfa434f6 977lra_set_insn_recog_data (rtx_insn *insn)
55a2c322
VM
978{
979 lra_insn_recog_data_t data;
980 int i, n, icode;
981 rtx **locs;
982 unsigned int uid = INSN_UID (insn);
983 struct lra_static_insn_data *insn_static_data;
984
985 check_and_expand_insn_recog_data (uid);
986 if (DEBUG_INSN_P (insn))
987 icode = -1;
988 else
989 {
990 icode = INSN_CODE (insn);
991 if (icode < 0)
992 /* It might be a new simple insn which is not recognized yet. */
993 INSN_CODE (insn) = icode = recog_memoized (insn);
994 }
033bd26e 995 data = lra_insn_recog_data_pool.allocate ();
55a2c322
VM
996 lra_insn_recog_data[uid] = data;
997 data->insn = insn;
7874b7c5 998 data->used_insn_alternative = LRA_UNKNOWN_ALT;
b175b488 999 data->asm_reloads_num = 0;
55a2c322
VM
1000 data->icode = icode;
1001 data->regs = NULL;
1002 if (DEBUG_INSN_P (insn))
1003 {
55a2c322
VM
1004 data->dup_loc = NULL;
1005 data->arg_hard_regs = NULL;
9840b2fa 1006 data->preferred_alternatives = ALL_ALTERNATIVES;
96a95ac1
AO
1007 if (DEBUG_BIND_INSN_P (insn))
1008 {
1009 data->insn_static_data = &debug_bind_static_data;
1010 data->operand_loc = XNEWVEC (rtx *, 1);
1011 data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
1012 }
1013 else if (DEBUG_MARKER_INSN_P (insn))
1014 {
1015 data->insn_static_data = &debug_marker_static_data;
1016 data->operand_loc = NULL;
1017 }
55a2c322
VM
1018 return data;
1019 }
1020 if (icode < 0)
1021 {
0c331756 1022 int nop, nalt;
ef4bddc2 1023 machine_mode operand_mode[MAX_RECOG_OPERANDS];
55a2c322
VM
1024 const char *constraints[MAX_RECOG_OPERANDS];
1025
1026 nop = asm_noperands (PATTERN (insn));
1027 data->operand_loc = data->dup_loc = NULL;
0c331756 1028 nalt = 1;
55a2c322 1029 if (nop < 0)
91c5ee5b 1030 {
0e9e0a21 1031 /* It is a special insn like USE or CLOBBER. We should
91c5ee5b
VM
1032 recognize any regular insn otherwise LRA can do nothing
1033 with this insn. */
1034 gcc_assert (GET_CODE (PATTERN (insn)) == USE
1035 || GET_CODE (PATTERN (insn)) == CLOBBER
1036 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
1037 data->insn_static_data = insn_static_data
0c331756 1038 = get_static_insn_data (-1, 0, 0, nalt);
91c5ee5b 1039 }
55a2c322
VM
1040 else
1041 {
1042 /* expand_asm_operands makes sure there aren't too many
1043 operands. */
1044 lra_assert (nop <= MAX_RECOG_OPERANDS);
1045 if (nop != 0)
1046 data->operand_loc = XNEWVEC (rtx *, nop);
1047 /* Now get the operand values and constraints out of the
1048 insn. */
1049 decode_asm_operands (PATTERN (insn), NULL,
1050 data->operand_loc,
1051 constraints, operand_mode, NULL);
55a2c322 1052 if (nop > 0)
ef874db6
ML
1053 for (const char *p =constraints[0]; *p; p++)
1054 nalt += *p == ',';
55a2c322 1055 data->insn_static_data = insn_static_data
0c331756 1056 = get_static_insn_data (-1, nop, 0, nalt);
55a2c322
VM
1057 for (i = 0; i < nop; i++)
1058 {
1059 insn_static_data->operand[i].mode = operand_mode[i];
1060 insn_static_data->operand[i].constraint = constraints[i];
1061 insn_static_data->operand[i].strict_low = false;
1062 insn_static_data->operand[i].is_operator = false;
1063 insn_static_data->operand[i].is_address = false;
1064 }
1065 }
1066 for (i = 0; i < insn_static_data->n_operands; i++)
1067 insn_static_data->operand[i].type
1068 = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1069 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1070 : OP_IN);
9840b2fa 1071 data->preferred_alternatives = ALL_ALTERNATIVES;
0c331756
RS
1072 if (nop > 0)
1073 {
1074 operand_alternative *op_alt = XCNEWVEC (operand_alternative,
1075 nalt * nop);
998fd141
AO
1076 preprocess_constraints (nop, nalt, constraints, op_alt,
1077 data->operand_loc);
0c331756
RS
1078 setup_operand_alternative (data, op_alt);
1079 }
55a2c322
VM
1080 }
1081 else
1082 {
1083 insn_extract (insn);
1084 data->insn_static_data = insn_static_data
1085 = get_static_insn_data (icode, insn_data[icode].n_operands,
1086 insn_data[icode].n_dups,
1087 insn_data[icode].n_alternatives);
1088 n = insn_static_data->n_operands;
1089 if (n == 0)
1090 locs = NULL;
1091 else
1092 {
1093 locs = XNEWVEC (rtx *, n);
1094 memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
1095 }
1096 data->operand_loc = locs;
1097 n = insn_static_data->n_dups;
1098 if (n == 0)
1099 locs = NULL;
1100 else
1101 {
1102 locs = XNEWVEC (rtx *, n);
1103 memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
1104 }
1105 data->dup_loc = locs;
9840b2fa 1106 data->preferred_alternatives = get_preferred_alternatives (insn);
0c331756
RS
1107 const operand_alternative *op_alt = preprocess_insn_constraints (icode);
1108 if (!insn_static_data->operand_alternative)
1109 setup_operand_alternative (data, op_alt);
1110 else if (op_alt != insn_static_data->operand_alternative)
1111 insn_static_data->operand_alternative = op_alt;
55a2c322
VM
1112 }
1113 if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
1114 insn_static_data->hard_regs = NULL;
1115 else
1116 insn_static_data->hard_regs
561c58b4 1117 = collect_non_operand_hard_regs (insn, &PATTERN (insn), data,
17d184e5 1118 NULL, OP_IN, false);
55a2c322
VM
1119 data->arg_hard_regs = NULL;
1120 if (CALL_P (insn))
1121 {
9d86e84e 1122 bool use_p;
55a2c322
VM
1123 rtx link;
1124 int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];
1125
1126 n_hard_regs = 0;
1127 /* Finding implicit hard register usage. We believe it will be
1128 not changed whatever transformations are used. Call insns
1129 are such example. */
1130 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1131 link != NULL_RTX;
1132 link = XEXP (link, 1))
9d86e84e
VM
1133 if (((use_p = GET_CODE (XEXP (link, 0)) == USE)
1134 || GET_CODE (XEXP (link, 0)) == CLOBBER)
55a2c322
VM
1135 && REG_P (XEXP (XEXP (link, 0), 0)))
1136 {
1137 regno = REGNO (XEXP (XEXP (link, 0), 0));
1138 lra_assert (regno < FIRST_PSEUDO_REGISTER);
1139 /* It is an argument register. */
dc8afb70 1140 for (i = REG_NREGS (XEXP (XEXP (link, 0), 0)) - 1; i >= 0; i--)
9d86e84e
VM
1141 arg_hard_regs[n_hard_regs++]
1142 = regno + i + (use_p ? 0 : FIRST_PSEUDO_REGISTER);
55a2c322 1143 }
30dc1902 1144
55a2c322
VM
1145 if (n_hard_regs != 0)
1146 {
1147 arg_hard_regs[n_hard_regs++] = -1;
1148 data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
1149 memcpy (data->arg_hard_regs, arg_hard_regs,
1150 sizeof (int) * n_hard_regs);
1151 }
1152 }
1153 /* Some output operand can be recognized only from the context not
1154 from the constraints which are empty in this case. Call insn may
1155 contain a hard register in set destination with empty constraint
1156 and extract_insn treats them as an input. */
1157 for (i = 0; i < insn_static_data->n_operands; i++)
1158 {
1159 int j;
1160 rtx pat, set;
1161 struct lra_operand_data *operand = &insn_static_data->operand[i];
1162
1163 /* ??? Should we treat 'X' the same way. It looks to me that
1164 'X' means anything and empty constraint means we do not
1165 care. */
1166 if (operand->type != OP_IN || *operand->constraint != '\0'
1167 || operand->is_operator)
1168 continue;
1169 pat = PATTERN (insn);
1170 if (GET_CODE (pat) == SET)
1171 {
1172 if (data->operand_loc[i] != &SET_DEST (pat))
1173 continue;
1174 }
1175 else if (GET_CODE (pat) == PARALLEL)
1176 {
1177 for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
1178 {
1179 set = XVECEXP (PATTERN (insn), 0, j);
1180 if (GET_CODE (set) == SET
1181 && &SET_DEST (set) == data->operand_loc[i])
1182 break;
1183 }
1184 if (j < 0)
1185 continue;
1186 }
1187 else
1188 continue;
1189 operand->type = OP_OUT;
1190 }
1191 return data;
1192}
1193
1194/* Return info about insn give by UID. The info should be already set
1195 up. */
1196static lra_insn_recog_data_t
1197get_insn_recog_data_by_uid (int uid)
1198{
1199 lra_insn_recog_data_t data;
1200
1201 data = lra_insn_recog_data[uid];
1202 lra_assert (data != NULL);
1203 return data;
1204}
1205
1206/* Invalidate all info about insn given by its UID. */
1207static void
1208invalidate_insn_recog_data (int uid)
1209{
1210 lra_insn_recog_data_t data;
1211
1212 data = lra_insn_recog_data[uid];
1213 lra_assert (data != NULL);
1214 free_insn_recog_data (data);
1215 lra_insn_recog_data[uid] = NULL;
1216}
1217
1218/* Update all the insn info about INSN. It is usually called when
1219 something in the insn was changed. Return the updated info. */
1220lra_insn_recog_data_t
cfa434f6 1221lra_update_insn_recog_data (rtx_insn *insn)
55a2c322
VM
1222{
1223 lra_insn_recog_data_t data;
1224 int n;
1225 unsigned int uid = INSN_UID (insn);
1226 struct lra_static_insn_data *insn_static_data;
73ca989c 1227 poly_int64 sp_offset = 0;
f4eafc30 1228
55a2c322
VM
1229 check_and_expand_insn_recog_data (uid);
1230 if ((data = lra_insn_recog_data[uid]) != NULL
1231 && data->icode != INSN_CODE (insn))
1232 {
8d49e7ef 1233 sp_offset = data->sp_offset;
55a2c322
VM
1234 invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
1235 invalidate_insn_recog_data (uid);
1236 data = NULL;
1237 }
1238 if (data == NULL)
8d49e7ef
VM
1239 {
1240 data = lra_get_insn_recog_data (insn);
1241 /* Initiate or restore SP offset. */
1242 data->sp_offset = sp_offset;
1243 return data;
1244 }
55a2c322 1245 insn_static_data = data->insn_static_data;
7874b7c5 1246 data->used_insn_alternative = LRA_UNKNOWN_ALT;
55a2c322
VM
1247 if (DEBUG_INSN_P (insn))
1248 return data;
1249 if (data->icode < 0)
1250 {
1251 int nop;
ef4bddc2 1252 machine_mode operand_mode[MAX_RECOG_OPERANDS];
55a2c322
VM
1253 const char *constraints[MAX_RECOG_OPERANDS];
1254
1255 nop = asm_noperands (PATTERN (insn));
1256 if (nop >= 0)
1257 {
1258 lra_assert (nop == data->insn_static_data->n_operands);
1259 /* Now get the operand values and constraints out of the
1260 insn. */
1261 decode_asm_operands (PATTERN (insn), NULL,
1262 data->operand_loc,
1263 constraints, operand_mode, NULL);
55a2c322 1264
b2b29377
MM
1265 if (flag_checking)
1266 for (int i = 0; i < nop; i++)
55a2c322
VM
1267 lra_assert
1268 (insn_static_data->operand[i].mode == operand_mode[i]
1269 && insn_static_data->operand[i].constraint == constraints[i]
1270 && ! insn_static_data->operand[i].is_operator);
55a2c322 1271 }
55a2c322 1272
b2b29377
MM
1273 if (flag_checking)
1274 for (int i = 0; i < insn_static_data->n_operands; i++)
55a2c322
VM
1275 lra_assert
1276 (insn_static_data->operand[i].type
1277 == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1278 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1279 : OP_IN));
55a2c322
VM
1280 }
1281 else
1282 {
1283 insn_extract (insn);
1284 n = insn_static_data->n_operands;
1285 if (n != 0)
1286 memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
1287 n = insn_static_data->n_dups;
1288 if (n != 0)
1289 memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
9840b2fa 1290 lra_assert (check_bool_attrs (insn));
55a2c322
VM
1291 }
1292 return data;
1293}
1294
1295/* Set up that INSN is using alternative ALT now. */
1296void
cfa434f6 1297lra_set_used_insn_alternative (rtx_insn *insn, int alt)
55a2c322
VM
1298{
1299 lra_insn_recog_data_t data;
1300
1301 data = lra_get_insn_recog_data (insn);
1302 data->used_insn_alternative = alt;
1303}
1304
1305/* Set up that insn with UID is using alternative ALT now. The insn
1306 info should be already set up. */
1307void
1308lra_set_used_insn_alternative_by_uid (int uid, int alt)
1309{
1310 lra_insn_recog_data_t data;
1311
1312 check_and_expand_insn_recog_data (uid);
1313 data = lra_insn_recog_data[uid];
1314 lra_assert (data != NULL);
1315 data->used_insn_alternative = alt;
1316}
1317
1318\f
1319
1320/* This page contains code dealing with common register info and
1321 pseudo copies. */
1322
1323/* The size of the following array. */
1324static int reg_info_size;
1325/* Common info about each register. */
99b1c316 1326class lra_reg *lra_reg_info;
55a2c322 1327
54178a01
TV
1328HARD_REG_SET hard_regs_spilled_into;
1329
55a2c322
VM
1330/* Last register value. */
1331static int last_reg_value;
1332
1333/* Return new register value. */
1334static int
1335get_new_reg_value (void)
1336{
1337 return ++last_reg_value;
1338}
1339
55a2c322 1340/* Vec referring to pseudo copies. */
9771b263 1341static vec<lra_copy_t> copy_vec;
55a2c322
VM
1342
1343/* Initialize I-th element of lra_reg_info. */
1344static inline void
1345initialize_lra_reg_info_element (int i)
1346{
1347 bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
1348#ifdef STACK_REGS
1349 lra_reg_info[i].no_stack_p = false;
1350#endif
1351 CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
85419ac5 1352 CLEAR_HARD_REG_SET (lra_reg_info[i].exclude_start_hard_regs);
55a2c322
VM
1353 lra_reg_info[i].preferred_hard_regno1 = -1;
1354 lra_reg_info[i].preferred_hard_regno2 = -1;
1355 lra_reg_info[i].preferred_hard_regno_profit1 = 0;
1356 lra_reg_info[i].preferred_hard_regno_profit2 = 0;
b28ece32 1357 lra_reg_info[i].biggest_mode = VOIDmode;
55a2c322
VM
1358 lra_reg_info[i].live_ranges = NULL;
1359 lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
1360 lra_reg_info[i].last_reload = 0;
8a8330b7 1361 lra_reg_info[i].restore_rtx = NULL_RTX;
55a2c322 1362 lra_reg_info[i].val = get_new_reg_value ();
d70a81dd 1363 lra_reg_info[i].offset = 0;
55a2c322
VM
1364 lra_reg_info[i].copies = NULL;
1365}
1366
1367/* Initialize common reg info and copies. */
1368static void
1369init_reg_info (void)
1370{
1371 int i;
1372
1373 last_reg_value = 0;
1374 reg_info_size = max_reg_num () * 3 / 2 + 1;
99b1c316 1375 lra_reg_info = XNEWVEC (class lra_reg, reg_info_size);
55a2c322
VM
1376 for (i = 0; i < reg_info_size; i++)
1377 initialize_lra_reg_info_element (i);
ff569744 1378 copy_vec.truncate (0);
54178a01 1379 CLEAR_HARD_REG_SET (hard_regs_spilled_into);
55a2c322
VM
1380}
1381
1382
1383/* Finish common reg info and copies. */
1384static void
1385finish_reg_info (void)
1386{
1387 int i;
1388
1389 for (i = 0; i < reg_info_size; i++)
1390 bitmap_clear (&lra_reg_info[i].insn_bitmap);
1391 free (lra_reg_info);
1392 reg_info_size = 0;
55a2c322
VM
1393}
1394
1395/* Expand common reg info if it is necessary. */
1396static void
1397expand_reg_info (void)
1398{
1399 int i, old = reg_info_size;
1400
1401 if (reg_info_size > max_reg_num ())
1402 return;
1403 reg_info_size = max_reg_num () * 3 / 2 + 1;
99b1c316 1404 lra_reg_info = XRESIZEVEC (class lra_reg, lra_reg_info, reg_info_size);
55a2c322
VM
1405 for (i = old; i < reg_info_size; i++)
1406 initialize_lra_reg_info_element (i);
1407}
1408
1409/* Free all copies. */
1410void
1411lra_free_copies (void)
1412{
1413 lra_copy_t cp;
1414
9771b263 1415 while (copy_vec.length () != 0)
55a2c322 1416 {
9771b263 1417 cp = copy_vec.pop ();
55a2c322 1418 lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
fb0b2914 1419 lra_copy_pool.remove (cp);
55a2c322
VM
1420 }
1421}
1422
1423/* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1424 frequency is FREQ. */
1425void
1426lra_create_copy (int regno1, int regno2, int freq)
1427{
1428 bool regno1_dest_p;
1429 lra_copy_t cp;
1430
1431 lra_assert (regno1 != regno2);
1432 regno1_dest_p = true;
1433 if (regno1 > regno2)
1434 {
6b4db501 1435 std::swap (regno1, regno2);
55a2c322 1436 regno1_dest_p = false;
55a2c322 1437 }
fb0b2914 1438 cp = lra_copy_pool.allocate ();
9771b263 1439 copy_vec.safe_push (cp);
55a2c322
VM
1440 cp->regno1_dest_p = regno1_dest_p;
1441 cp->freq = freq;
1442 cp->regno1 = regno1;
1443 cp->regno2 = regno2;
1444 cp->regno1_next = lra_reg_info[regno1].copies;
1445 lra_reg_info[regno1].copies = cp;
1446 cp->regno2_next = lra_reg_info[regno2].copies;
1447 lra_reg_info[regno2].copies = cp;
1448 if (lra_dump_file != NULL)
1449 fprintf (lra_dump_file, " Creating copy r%d%sr%d@%d\n",
1450 regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
1451}
1452
1453/* Return N-th (0, 1, ...) copy. If there is no copy, return
1454 NULL. */
1455lra_copy_t
1456lra_get_copy (int n)
1457{
9771b263 1458 if (n >= (int) copy_vec.length ())
55a2c322 1459 return NULL;
9771b263 1460 return copy_vec[n];
55a2c322
VM
1461}
1462
1463\f
1464
1465/* This page contains code dealing with info about registers in
1466 insns. */
1467
a25f3e8e
RS
1468/* Process X of INSN recursively and add info (operand type is given
1469 by TYPE) about registers in X to the insn DATA. If X can be early
1470 clobbered, alternatives in which it can be early clobbered are given
1471 by EARLY_CLOBBER_ALTS. */
55a2c322 1472static void
561c58b4 1473add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x,
a25f3e8e 1474 rtx_insn *insn, enum op_type type,
584898ee 1475 alternative_mask early_clobber_alts)
55a2c322
VM
1476{
1477 int i, j, regno;
1478 bool subreg_p;
ef4bddc2 1479 machine_mode mode;
55a2c322
VM
1480 const char *fmt;
1481 enum rtx_code code;
1482 struct lra_insn_reg *curr;
1483
1484 code = GET_CODE (x);
1485 mode = GET_MODE (x);
1486 subreg_p = false;
1487 if (GET_CODE (x) == SUBREG)
1488 {
bd5a2c67 1489 mode = wider_subreg_mode (x);
9eaf97d6
RS
1490 if (read_modify_subreg_p (x))
1491 subreg_p = true;
55a2c322
VM
1492 x = SUBREG_REG (x);
1493 code = GET_CODE (x);
55a2c322
VM
1494 }
1495 if (REG_P (x))
1496 {
1497 regno = REGNO (x);
d9cf932c
VM
1498 /* Process all regs even unallocatable ones as we need info about
1499 all regs for rematerialization pass. */
55a2c322 1500 expand_reg_info ();
561c58b4 1501 if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, INSN_UID (insn)))
55a2c322 1502 {
c34c46dd 1503 data->regs = new_insn_reg (data->insn, regno, type, mode, subreg_p,
17d184e5 1504 early_clobber_alts, data->regs);
55a2c322
VM
1505 return;
1506 }
1507 else
1508 {
1509 for (curr = data->regs; curr != NULL; curr = curr->next)
1510 if (curr->regno == regno)
1511 {
1512 if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
67914693 1513 /* The info cannot be integrated into the found
55a2c322 1514 structure. */
c34c46dd 1515 data->regs = new_insn_reg (data->insn, regno, type, mode,
a25f3e8e 1516 subreg_p, early_clobber_alts,
17d184e5 1517 data->regs);
55a2c322
VM
1518 else
1519 {
1520 if (curr->type != type)
1521 curr->type = OP_INOUT;
584898ee 1522 curr->early_clobber_alts |= early_clobber_alts;
55a2c322
VM
1523 }
1524 return;
1525 }
1526 gcc_unreachable ();
1527 }
1528 }
1529
1530 switch (code)
1531 {
1532 case SET:
a25f3e8e
RS
1533 add_regs_to_insn_regno_info (data, SET_DEST (x), insn, OP_OUT, 0);
1534 add_regs_to_insn_regno_info (data, SET_SRC (x), insn, OP_IN, 0);
55a2c322
VM
1535 break;
1536 case CLOBBER:
75214935
VM
1537 /* We treat clobber of non-operand hard registers as early
1538 clobber. */
1539 add_regs_to_insn_regno_info (data, XEXP (x, 0), insn, OP_OUT,
a25f3e8e 1540 ALL_ALTERNATIVES);
75214935 1541 break;
55a2c322 1542 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
a25f3e8e 1543 add_regs_to_insn_regno_info (data, XEXP (x, 0), insn, OP_INOUT, 0);
55a2c322
VM
1544 break;
1545 case PRE_MODIFY: case POST_MODIFY:
a25f3e8e
RS
1546 add_regs_to_insn_regno_info (data, XEXP (x, 0), insn, OP_INOUT, 0);
1547 add_regs_to_insn_regno_info (data, XEXP (x, 1), insn, OP_IN, 0);
55a2c322
VM
1548 break;
1549 default:
1550 if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
1551 /* Some targets place small structures in registers for return
1552 values of functions, and those registers are wrapped in
1553 PARALLEL that we may see as the destination of a SET. Here
1554 is an example:
1555
1556 (call_insn 13 12 14 2 (set (parallel:BLK [
1557 (expr_list:REG_DEP_TRUE (reg:DI 0 ax)
1558 (const_int 0 [0]))
1559 (expr_list:REG_DEP_TRUE (reg:DI 1 dx)
1560 (const_int 8 [0x8]))
1561 ])
1562 (call (mem:QI (symbol_ref:DI (... */
1563 type = OP_IN;
1564 fmt = GET_RTX_FORMAT (code);
1565 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1566 {
1567 if (fmt[i] == 'e')
a25f3e8e 1568 add_regs_to_insn_regno_info (data, XEXP (x, i), insn, type, 0);
55a2c322
VM
1569 else if (fmt[i] == 'E')
1570 {
1571 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
561c58b4 1572 add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), insn,
a25f3e8e 1573 type, 0);
55a2c322
VM
1574 }
1575 }
1576 }
1577}
1578
1579/* Return execution frequency of INSN. */
1580static int
cfa434f6 1581get_insn_freq (rtx_insn *insn)
55a2c322 1582{
39718607 1583 basic_block bb = BLOCK_FOR_INSN (insn);
55a2c322 1584
39718607
SB
1585 gcc_checking_assert (bb != NULL);
1586 return REG_FREQ_FROM_BB (bb);
55a2c322
VM
1587}
1588
1589/* Invalidate all reg info of INSN with DATA and execution frequency
1590 FREQ. Update common info about the invalidated registers. */
1591static void
cfa434f6 1592invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx_insn *insn,
55a2c322
VM
1593 int freq)
1594{
1595 int uid;
1596 bool debug_p;
1597 unsigned int i;
1598 struct lra_insn_reg *ir, *next_ir;
1599
1600 uid = INSN_UID (insn);
1601 debug_p = DEBUG_INSN_P (insn);
1602 for (ir = data->regs; ir != NULL; ir = next_ir)
1603 {
1604 i = ir->regno;
1605 next_ir = ir->next;
fb0b2914 1606 lra_insn_reg_pool.remove (ir);
55a2c322
VM
1607 bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
1608 if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
1609 {
1610 lra_reg_info[i].nrefs--;
1611 lra_reg_info[i].freq -= freq;
1612 lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
1613 }
1614 }
1615 data->regs = NULL;
1616}
1617
1618/* Invalidate all reg info of INSN. Update common info about the
1619 invalidated registers. */
1620void
cfa434f6 1621lra_invalidate_insn_regno_info (rtx_insn *insn)
55a2c322
VM
1622{
1623 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1624 get_insn_freq (insn));
1625}
1626
1627/* Update common reg info from reg info of insn given by its DATA and
1628 execution frequency FREQ. */
1629static void
1630setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1631{
1632 unsigned int i;
1633 struct lra_insn_reg *ir;
1634
1635 for (ir = data->regs; ir != NULL; ir = ir->next)
1636 if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1637 {
1638 lra_reg_info[i].nrefs++;
1639 lra_reg_info[i].freq += freq;
1640 }
1641}
1642
1643/* Set up insn reg info of INSN. Update common reg info from reg info
1644 of INSN. */
1645void
cfa434f6 1646lra_update_insn_regno_info (rtx_insn *insn)
55a2c322 1647{
561c58b4 1648 int i, freq;
55a2c322
VM
1649 lra_insn_recog_data_t data;
1650 struct lra_static_insn_data *static_data;
1651 enum rtx_code code;
05f23918
VM
1652 rtx link;
1653
55a2c322
VM
1654 if (! INSN_P (insn))
1655 return;
1656 data = lra_get_insn_recog_data (insn);
1657 static_data = data->insn_static_data;
96a95ac1 1658 freq = NONDEBUG_INSN_P (insn) ? get_insn_freq (insn) : 0;
55a2c322 1659 invalidate_insn_data_regno_info (data, insn, freq);
55a2c322 1660 for (i = static_data->n_operands - 1; i >= 0; i--)
561c58b4 1661 add_regs_to_insn_regno_info (data, *data->operand_loc[i], insn,
55a2c322 1662 static_data->operand[i].type,
584898ee 1663 static_data->operand[i].early_clobber_alts);
55a2c322 1664 if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
561c58b4 1665 add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), insn,
a25f3e8e 1666 code == USE ? OP_IN : OP_OUT, 0);
05f23918
VM
1667 if (CALL_P (insn))
1668 /* On some targets call insns can refer to pseudos in memory in
1669 CALL_INSN_FUNCTION_USAGE list. Process them in order to
1670 consider their occurrences in calls for different
1671 transformations (e.g. inheritance) with given pseudos. */
1672 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1673 link != NULL_RTX;
1674 link = XEXP (link, 1))
30dc1902
AH
1675 {
1676 code = GET_CODE (XEXP (link, 0));
30dc1902
AH
1677 if ((code == USE || code == CLOBBER)
1678 && MEM_P (XEXP (XEXP (link, 0), 0)))
1679 add_regs_to_insn_regno_info (data, XEXP (XEXP (link, 0), 0), insn,
a25f3e8e 1680 code == USE ? OP_IN : OP_OUT, 0);
30dc1902 1681 }
55a2c322
VM
1682 if (NONDEBUG_INSN_P (insn))
1683 setup_insn_reg_info (data, freq);
1684}
1685
1686/* Return reg info of insn given by it UID. */
1687struct lra_insn_reg *
1688lra_get_insn_regs (int uid)
1689{
1690 lra_insn_recog_data_t data;
1691
1692 data = get_insn_recog_data_by_uid (uid);
1693 return data->regs;
1694}
1695
1696\f
1697
8a8330b7
VM
1698/* Recursive hash function for RTL X. */
1699hashval_t
1700lra_rtx_hash (rtx x)
1701{
1702 int i, j;
1703 enum rtx_code code;
1704 const char *fmt;
1705 hashval_t val = 0;
1706
1707 if (x == 0)
1708 return val;
1709
1710 code = GET_CODE (x);
1711 val += (int) code + 4095;
1712
1713 /* Some RTL can be compared nonrecursively. */
1714 switch (code)
1715 {
1716 case REG:
1717 return val + REGNO (x);
1718
1719 case LABEL_REF:
1720 return iterative_hash_object (XEXP (x, 0), val);
1721
1722 case SYMBOL_REF:
1723 return iterative_hash_object (XSTR (x, 0), val);
1724
1725 case SCRATCH:
1726 case CONST_DOUBLE:
8a8330b7
VM
1727 case CONST_VECTOR:
1728 return val;
1729
f4caead0
RB
1730 case CONST_INT:
1731 return val + UINTVAL (x);
1732
8a8330b7
VM
1733 default:
1734 break;
1735 }
1736
1737 /* Hash the elements. */
1738 fmt = GET_RTX_FORMAT (code);
1739 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1740 {
1741 switch (fmt[i])
1742 {
1743 case 'w':
1744 val += XWINT (x, i);
1745 break;
1746
1747 case 'n':
1748 case 'i':
1749 val += XINT (x, i);
1750 break;
1751
1752 case 'V':
1753 case 'E':
1754 val += XVECLEN (x, i);
1755
1756 for (j = 0; j < XVECLEN (x, i); j++)
1757 val += lra_rtx_hash (XVECEXP (x, i, j));
1758 break;
1759
1760 case 'e':
1761 val += lra_rtx_hash (XEXP (x, i));
1762 break;
1763
1764 case 'S':
1765 case 's':
1766 val += htab_hash_string (XSTR (x, i));
1767 break;
1768
1769 case 'u':
1770 case '0':
1771 case 't':
1772 break;
1773
1774 /* It is believed that rtx's at this level will never
1775 contain anything but integers and other rtx's, except for
1776 within LABEL_REFs and SYMBOL_REFs. */
1777 default:
1778 abort ();
1779 }
1780 }
1781 return val;
1782}
1783
1784\f
1785
55a2c322
VM
1786/* This page contains code dealing with stack of the insns which
1787 should be processed by the next constraint pass. */
1788
1789/* Bitmap used to put an insn on the stack only in one exemplar. */
1790static sbitmap lra_constraint_insn_stack_bitmap;
1791
1792/* The stack itself. */
cfa434f6 1793vec<rtx_insn *> lra_constraint_insn_stack;
55a2c322
VM
1794
1795/* Put INSN on the stack. If ALWAYS_UPDATE is true, always update the reg
1796 info for INSN, otherwise only update it if INSN is not already on the
1797 stack. */
1798static inline void
cfa434f6 1799lra_push_insn_1 (rtx_insn *insn, bool always_update)
55a2c322
VM
1800{
1801 unsigned int uid = INSN_UID (insn);
1802 if (always_update)
1803 lra_update_insn_regno_info (insn);
1804 if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
1805 lra_constraint_insn_stack_bitmap =
1806 sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
d7c028c0 1807 if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
55a2c322 1808 return;
d7c028c0 1809 bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
55a2c322
VM
1810 if (! always_update)
1811 lra_update_insn_regno_info (insn);
9771b263 1812 lra_constraint_insn_stack.safe_push (insn);
55a2c322
VM
1813}
1814
1815/* Put INSN on the stack. */
1816void
cfa434f6 1817lra_push_insn (rtx_insn *insn)
55a2c322
VM
1818{
1819 lra_push_insn_1 (insn, false);
1820}
1821
1822/* Put INSN on the stack and update its reg info. */
1823void
cfa434f6 1824lra_push_insn_and_update_insn_regno_info (rtx_insn *insn)
55a2c322
VM
1825{
1826 lra_push_insn_1 (insn, true);
1827}
1828
1829/* Put insn with UID on the stack. */
1830void
1831lra_push_insn_by_uid (unsigned int uid)
1832{
1833 lra_push_insn (lra_insn_recog_data[uid]->insn);
1834}
1835
1836/* Take the last-inserted insns off the stack and return it. */
cfa434f6 1837rtx_insn *
55a2c322
VM
1838lra_pop_insn (void)
1839{
cfa434f6 1840 rtx_insn *insn = lra_constraint_insn_stack.pop ();
d7c028c0 1841 bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
55a2c322
VM
1842 return insn;
1843}
1844
1845/* Return the current size of the insn stack. */
1846unsigned int
1847lra_insn_stack_length (void)
1848{
9771b263 1849 return lra_constraint_insn_stack.length ();
55a2c322
VM
1850}
1851
1852/* Push insns FROM to TO (excluding it) going in reverse order. */
1853static void
cfa434f6 1854push_insns (rtx_insn *from, rtx_insn *to)
55a2c322 1855{
cfa434f6 1856 rtx_insn *insn;
55a2c322
VM
1857
1858 if (from == NULL_RTX)
1859 return;
1860 for (insn = from; insn != to; insn = PREV_INSN (insn))
1861 if (INSN_P (insn))
1862 lra_push_insn (insn);
1863}
1864
30038a20 1865/* Set up and return sp offset for insns in range [FROM, LAST]. The offset is
8d49e7ef
VM
1866 taken from the next BB insn after LAST or zero if there in such
1867 insn. */
30038a20 1868static poly_int64
cfa434f6 1869setup_sp_offset (rtx_insn *from, rtx_insn *last)
8d49e7ef 1870{
f40dd646 1871 rtx_insn *before = next_nonnote_nondebug_insn_bb (last);
73ca989c
RS
1872 poly_int64 offset = (before == NULL_RTX || ! INSN_P (before)
1873 ? 0 : lra_get_insn_recog_data (before)->sp_offset);
8d49e7ef 1874
cfa434f6 1875 for (rtx_insn *insn = from; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
30038a20
VM
1876 {
1877 lra_get_insn_recog_data (insn)->sp_offset = offset;
1878 offset = lra_update_sp_offset (PATTERN (insn), offset);
1879 }
1880 return offset;
8d49e7ef
VM
1881}
1882
1390bf52
VM
1883/* Dump all func insns in a slim form. */
1884void
1885lra_dump_insns (FILE *f)
1886{
1887 dump_rtl_slim (f, get_insns (), NULL, -1, 0);
1888}
1889
1890/* Dump all func insns in a slim form with TITLE when the dump file is open and
1891 lra_verbose >=7. */
1892void
1893lra_dump_insns_if_possible (const char *title)
1894{
1895 if (lra_dump_file == NULL || lra_verbose < 7)
1896 return;
1897 fprintf (lra_dump_file, "%s:", title);
1898 lra_dump_insns (lra_dump_file);
1899}
1900
55a2c322
VM
1901/* Emit insns BEFORE before INSN and insns AFTER after INSN. Put the
1902 insns onto the stack. Print about emitting the insns with
1903 TITLE. */
1904void
cfa434f6
DM
1905lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
1906 const char *title)
55a2c322 1907{
8d49e7ef
VM
1908 if (before == NULL_RTX && after == NULL_RTX)
1909 return;
1910 if (lra_dump_file != NULL)
55a2c322 1911 {
cfbeaedf 1912 dump_insn_slim (lra_dump_file, insn);
55a2c322
VM
1913 if (before != NULL_RTX)
1914 {
1915 fprintf (lra_dump_file," %s before:\n", title);
dc01c3d1 1916 dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
55a2c322 1917 }
55a2c322
VM
1918 }
1919 if (before != NULL_RTX)
1920 {
de4fcb99
JJ
1921 if (cfun->can_throw_non_call_exceptions)
1922 copy_reg_eh_region_note_forward (insn, before, NULL);
55a2c322 1923 emit_insn_before (before, insn);
30038a20
VM
1924 poly_int64 old_sp_offset = lra_get_insn_recog_data (insn)->sp_offset;
1925 poly_int64 new_sp_offset = setup_sp_offset (before, PREV_INSN (insn));
1926 if (maybe_ne (old_sp_offset, new_sp_offset))
1927 {
1928 if (lra_dump_file != NULL)
1929 {
1930 fprintf (lra_dump_file, " Changing sp offset from ");
1931 print_dec (old_sp_offset, lra_dump_file);
1932 fprintf (lra_dump_file, " to ");
1933 print_dec (new_sp_offset, lra_dump_file);
1934 fprintf (lra_dump_file, " for insn");
1935 dump_rtl_slim (lra_dump_file, insn, NULL, -1, 0);
1936 }
1937 lra_get_insn_recog_data (insn)->sp_offset = new_sp_offset;
1938 eliminate_regs_in_insn (insn, false, false,
1939 old_sp_offset - new_sp_offset);
1940 lra_push_insn (insn);
1941 }
55a2c322
VM
1942 push_insns (PREV_INSN (insn), PREV_INSN (before));
1943 }
1944 if (after != NULL_RTX)
1945 {
de4fcb99
JJ
1946 if (cfun->can_throw_non_call_exceptions)
1947 copy_reg_eh_region_note_forward (insn, after, NULL);
e3b3b596
VM
1948 if (! JUMP_P (insn))
1949 {
1950 rtx_insn *last;
1951
1952 if (lra_dump_file != NULL)
1953 {
1954 fprintf (lra_dump_file, " %s after:\n", title);
1955 dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
1956 }
1957 for (last = after;
1958 NEXT_INSN (last) != NULL_RTX;
1959 last = NEXT_INSN (last))
1960 ;
1961 emit_insn_after (after, insn);
1962 push_insns (last, insn);
1963 setup_sp_offset (after, last);
1964 }
1965 else
1966 {
1967 /* Put output reload insns on successor BBs: */
1968 edge_iterator ei;
1969 edge e;
1970
1971 FOR_EACH_EDGE (e, ei, BLOCK_FOR_INSN (insn)->succs)
1972 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1973 {
e53b6e56 1974 /* We already made the edge no-critical in ira.cc::ira */
e3b3b596 1975 lra_assert (!EDGE_CRITICAL_P (e));
253c415a 1976 rtx_insn *curr, *tmp = BB_HEAD (e->dest);
e3b3b596
VM
1977 if (LABEL_P (tmp))
1978 tmp = NEXT_INSN (tmp);
1979 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1980 tmp = NEXT_INSN (tmp);
253c415a 1981 /* Do not put reload insns if it is the last BB
a02e3042
VM
1982 without actual insns. */
1983 if (tmp == NULL)
253c415a
VM
1984 continue;
1985 start_sequence ();
1986 for (curr = after; curr != NULL_RTX; curr = NEXT_INSN (curr))
e3b3b596
VM
1987 emit_insn (copy_insn (PATTERN (curr)));
1988 rtx_insn *copy = get_insns (), *last = get_last_insn ();
1989 end_sequence ();
1990 if (lra_dump_file != NULL)
1991 {
1992 fprintf (lra_dump_file, " %s after in bb%d:\n", title,
1993 e->dest->index);
1994 dump_rtl_slim (lra_dump_file, copy, NULL, -1, 0);
1995 }
a02e3042
VM
1996 /* Use the right emit func for setting up BB_END/BB_HEAD: */
1997 if (BB_END (e->dest) == PREV_INSN (tmp))
1998 emit_insn_after_noloc (copy, PREV_INSN (tmp), e->dest);
1999 else
2000 emit_insn_before_noloc (copy, tmp, e->dest);
e3b3b596
VM
2001 push_insns (last, PREV_INSN (copy));
2002 setup_sp_offset (copy, last);
2003 /* We can ignore BB live info here as it and reg notes
2004 will be updated before the next assignment
2005 sub-pass. */
2006 }
2007 }
55a2c322 2008 }
e3b3b596
VM
2009 if (lra_dump_file != NULL)
2010 fprintf (lra_dump_file, "\n");
de4fcb99
JJ
2011 if (cfun->can_throw_non_call_exceptions)
2012 {
2013 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2014 if (note && !insn_could_throw_p (insn))
2015 remove_note (insn, note);
2016 }
55a2c322 2017}
55a2c322
VM
2018\f
2019
8160cd3e 2020/* Replace all references to register OLD_REGNO in *LOC with pseudo
ef87312e 2021 register NEW_REG. Try to simplify subreg of constant if SUBREG_P.
33006d53
JJ
2022 DEBUG_P is if LOC is within a DEBUG_INSN. Return true if any
2023 change was made. */
8160cd3e 2024bool
33006d53
JJ
2025lra_substitute_pseudo (rtx *loc, int old_regno, rtx new_reg, bool subreg_p,
2026 bool debug_p)
8160cd3e
VM
2027{
2028 rtx x = *loc;
2029 bool result = false;
2030 enum rtx_code code;
2031 const char *fmt;
2032 int i, j;
2033
2034 if (x == NULL_RTX)
2035 return false;
2036
2037 code = GET_CODE (x);
ef87312e 2038 if (code == SUBREG && subreg_p)
8160cd3e 2039 {
ef87312e
VM
2040 rtx subst, inner = SUBREG_REG (x);
2041 /* Transform subreg of constant while we still have inner mode
2042 of the subreg. The subreg internal should not be an insn
2043 operand. */
2044 if (REG_P (inner) && (int) REGNO (inner) == old_regno
2045 && CONSTANT_P (new_reg)
2046 && (subst = simplify_subreg (GET_MODE (x), new_reg, GET_MODE (inner),
2047 SUBREG_BYTE (x))) != NULL_RTX)
2048 {
2049 *loc = subst;
2050 return true;
2051 }
2052
2053 }
2054 else if (code == REG && (int) REGNO (x) == old_regno)
2055 {
2056 machine_mode mode = GET_MODE (x);
8160cd3e
VM
2057 machine_mode inner_mode = GET_MODE (new_reg);
2058
eef7b18c 2059 if (mode != inner_mode
59193b1b 2060 && ! (CONST_SCALAR_INT_P (new_reg) && SCALAR_INT_MODE_P (mode)))
8160cd3e 2061 {
33006d53
JJ
2062 poly_uint64 offset = 0;
2063 if (partial_subreg_p (mode, inner_mode)
2064 && SCALAR_INT_MODE_P (inner_mode))
2065 offset = subreg_lowpart_offset (mode, inner_mode);
2066 if (debug_p)
2067 new_reg = gen_rtx_raw_SUBREG (mode, new_reg, offset);
8160cd3e 2068 else
33006d53 2069 new_reg = gen_rtx_SUBREG (mode, new_reg, offset);
8160cd3e
VM
2070 }
2071 *loc = new_reg;
2072 return true;
2073 }
2074
2075 /* Scan all the operand sub-expressions. */
2076 fmt = GET_RTX_FORMAT (code);
2077 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2078 {
2079 if (fmt[i] == 'e')
2080 {
77eb0461
JJ
2081 if (debug_p
2082 && i == 0
2083 && (code == SUBREG
2084 || code == ZERO_EXTEND
2085 || code == SIGN_EXTEND
2086 || code == FLOAT
2087 || code == UNSIGNED_FLOAT))
2088 {
2089 rtx y = XEXP (x, 0);
2090 if (lra_substitute_pseudo (&y, old_regno,
2091 new_reg, subreg_p, debug_p))
2092 {
2093 result = true;
2094 if (CONST_SCALAR_INT_P (y))
2095 {
2096 if (code == SUBREG)
2097 y = simplify_subreg (GET_MODE (x), y,
2098 GET_MODE (SUBREG_REG (x)),
2099 SUBREG_BYTE (x));
2100 else
2101 y = simplify_unary_operation (code, GET_MODE (x), y,
2102 GET_MODE (XEXP (x, 0)));
2103 if (y)
2104 *loc = y;
2105 else
2106 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
2107 }
2108 else
2109 XEXP (x, 0) = y;
2110 }
2111 }
2112 else if (lra_substitute_pseudo (&XEXP (x, i), old_regno,
2113 new_reg, subreg_p, debug_p))
8160cd3e
VM
2114 result = true;
2115 }
2116 else if (fmt[i] == 'E')
2117 {
2118 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
ef87312e 2119 if (lra_substitute_pseudo (&XVECEXP (x, i, j), old_regno,
33006d53 2120 new_reg, subreg_p, debug_p))
8160cd3e
VM
2121 result = true;
2122 }
2123 }
2124 return result;
2125}
2126
ef87312e
VM
2127/* Call lra_substitute_pseudo within an insn. Try to simplify subreg
2128 of constant if SUBREG_P. This won't update the insn ptr, just the
2129 contents of the insn. */
8160cd3e 2130bool
ef87312e
VM
2131lra_substitute_pseudo_within_insn (rtx_insn *insn, int old_regno,
2132 rtx new_reg, bool subreg_p)
8160cd3e
VM
2133{
2134 rtx loc = insn;
33006d53
JJ
2135 return lra_substitute_pseudo (&loc, old_regno, new_reg, subreg_p,
2136 DEBUG_INSN_P (insn));
8160cd3e
VM
2137}
2138
2139\f
2140
44fbc9c6
VM
2141/* Return new register of the same mode as ORIGINAL of class ALL_REGS.
2142 Used in ira_remove_scratches. */
2143static rtx
2144get_scratch_reg (rtx original)
55a2c322 2145{
85419ac5
VM
2146 return lra_create_new_reg (GET_MODE (original), original, ALL_REGS,
2147 NULL, NULL);
55a2c322
VM
2148}
2149
44fbc9c6 2150/* Remove all insn scratches in INSN. */
55a2c322 2151static void
44fbc9c6 2152remove_insn_scratches (rtx_insn *insn)
55a2c322 2153{
44fbc9c6 2154 if (ira_remove_insn_scratches (insn, true, lra_dump_file, get_scratch_reg))
2713e5db
PB
2155 df_insn_rescan (insn);
2156}
2157
44fbc9c6 2158/* Remove all insn scratches in the current function. */
2713e5db
PB
2159static void
2160remove_scratches (void)
2161{
2162 basic_block bb;
2163 rtx_insn *insn;
2164
11cd3bed 2165 FOR_EACH_BB_FN (bb, cfun)
55a2c322 2166 FOR_BB_INSNS (bb, insn)
44fbc9c6
VM
2167 if (INSN_P (insn))
2168 remove_insn_scratches (insn);
55a2c322
VM
2169}
2170
55a2c322
VM
2171/* Function checks RTL for correctness. If FINAL_P is true, it is
2172 done at the end of LRA and the check is more rigorous. */
2173static void
2174check_rtl (bool final_p)
2175{
55a2c322 2176 basic_block bb;
cfa434f6 2177 rtx_insn *insn;
55a2c322
VM
2178
2179 lra_assert (! final_p || reload_completed);
11cd3bed 2180 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
2181 FOR_BB_INSNS (bb, insn)
2182 if (NONDEBUG_INSN_P (insn)
2183 && GET_CODE (PATTERN (insn)) != USE
2184 && GET_CODE (PATTERN (insn)) != CLOBBER
55a2c322
VM
2185 && GET_CODE (PATTERN (insn)) != ASM_INPUT)
2186 {
2187 if (final_p)
2188 {
75d25a02 2189 extract_constrain_insn (insn);
55a2c322
VM
2190 continue;
2191 }
a26891f4
VM
2192 /* LRA code is based on assumption that all addresses can be
2193 correctly decomposed. LRA can generate reloads for
2194 decomposable addresses. The decomposition code checks the
2195 correctness of the addresses. So we don't need to check
49f5efa5
VM
2196 the addresses here. Don't call insn_invalid_p here, it can
2197 change the code at this stage. */
2198 if (recog_memoized (insn) < 0 && asm_noperands (PATTERN (insn)) < 0)
55a2c322 2199 fatal_insn_not_found (insn);
55a2c322
VM
2200 }
2201}
55a2c322
VM
2202
2203/* Determine if the current function has an exception receiver block
2204 that reaches the exit block via non-exceptional edges */
2205static bool
2206has_nonexceptional_receiver (void)
2207{
2208 edge e;
2209 edge_iterator ei;
2210 basic_block *tos, *worklist, bb;
2211
2212 /* If we're not optimizing, then just err on the safe side. */
2213 if (!optimize)
2214 return true;
f4eafc30 2215
55a2c322 2216 /* First determine which blocks can reach exit via normal paths. */
0cae8d31 2217 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
55a2c322 2218
11cd3bed 2219 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
2220 bb->flags &= ~BB_REACHABLE;
2221
2222 /* Place the exit block on our worklist. */
fefa31b5
DM
2223 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
2224 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
f4eafc30 2225
55a2c322
VM
2226 /* Iterate: find everything reachable from what we've already seen. */
2227 while (tos != worklist)
2228 {
2229 bb = *--tos;
2230
2231 FOR_EACH_EDGE (e, ei, bb->preds)
2232 if (e->flags & EDGE_ABNORMAL)
2233 {
2234 free (worklist);
2235 return true;
2236 }
2237 else
2238 {
2239 basic_block src = e->src;
2240
2241 if (!(src->flags & BB_REACHABLE))
2242 {
2243 src->flags |= BB_REACHABLE;
2244 *tos++ = src;
2245 }
2246 }
2247 }
2248 free (worklist);
2249 /* No exceptional block reached exit unexceptionally. */
2250 return false;
2251}
2252
55a2c322
VM
2253/* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
2254 We change pseudos by hard registers without notification of DF and
2255 that can make the notes obsolete. DF-infrastructure does not deal
2256 with REG_INC notes -- so we should regenerate them here. */
2257static void
2258update_inc_notes (void)
2259{
2260 rtx *pnote;
2261 basic_block bb;
cfa434f6 2262 rtx_insn *insn;
55a2c322 2263
11cd3bed 2264 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
2265 FOR_BB_INSNS (bb, insn)
2266 if (NONDEBUG_INSN_P (insn))
2267 {
2268 pnote = &REG_NOTES (insn);
2269 while (*pnote != 0)
2270 {
20f114a3
YR
2271 if (REG_NOTE_KIND (*pnote) == REG_DEAD
2272 || REG_NOTE_KIND (*pnote) == REG_UNUSED
2273 || REG_NOTE_KIND (*pnote) == REG_INC)
55a2c322
VM
2274 *pnote = XEXP (*pnote, 1);
2275 else
2276 pnote = &XEXP (*pnote, 1);
2277 }
760edf20
TS
2278
2279 if (AUTO_INC_DEC)
2280 add_auto_inc_notes (insn, PATTERN (insn));
55a2c322
VM
2281 }
2282}
2283
0c8ecbcd
VM
2284/* Set to true while in LRA. */
2285bool lra_in_progress = false;
55a2c322 2286
f681cf95
VM
2287/* Start of pseudo regnos before the LRA. */
2288int lra_new_regno_start;
2289
f4eafc30 2290/* Start of reload pseudo regnos before the new spill pass. */
55a2c322
VM
2291int lra_constraint_new_regno_start;
2292
8fd827b8
VM
2293/* Avoid spilling pseudos with regno more than the following value if
2294 it is possible. */
2295int lra_bad_spill_regno_start;
2296
68ba1039
VM
2297/* A pseudo of Pmode. */
2298rtx lra_pmode_pseudo;
2299
f4eafc30 2300/* Inheritance pseudo regnos before the new spill pass. */
55a2c322
VM
2301bitmap_head lra_inheritance_pseudos;
2302
f4eafc30 2303/* Split regnos before the new spill pass. */
55a2c322
VM
2304bitmap_head lra_split_regs;
2305
5764ee3c
JW
2306/* Reload pseudo regnos before the new assignment pass which still can
2307 be spilled after the assignment pass as memory is also accepted in
2b778c9d 2308 insns for the reload pseudos. */
55a2c322
VM
2309bitmap_head lra_optional_reload_pseudos;
2310
2b778c9d 2311/* Pseudo regnos used for subreg reloads before the new assignment
5764ee3c 2312 pass. Such pseudos still can be spilled after the assignment
2b778c9d
VM
2313 pass. */
2314bitmap_head lra_subreg_reload_pseudos;
2315
55a2c322
VM
2316/* File used for output of LRA debug information. */
2317FILE *lra_dump_file;
2318
1390bf52
VM
2319/* How verbose should be the debug information. */
2320int lra_verbose;
2321
15a47f43
VM
2322/* True if we split hard reg after the last constraint sub-pass. */
2323bool lra_hard_reg_split_p;
2324
11067dee
VM
2325/* True if we found an asm error. */
2326bool lra_asm_error_p;
2327
55a2c322
VM
2328/* True if we should try spill into registers of different classes
2329 instead of memory. */
2330bool lra_reg_spill_p;
2331
2332/* Set up value LRA_REG_SPILL_P. */
2333static void
2334setup_reg_spill_flag (void)
2335{
2336 int cl, mode;
2337
2338 if (targetm.spill_class != NULL)
2339 for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2340 for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2341 if (targetm.spill_class ((enum reg_class) cl,
ef4bddc2 2342 (machine_mode) mode) != NO_REGS)
55a2c322
VM
2343 {
2344 lra_reg_spill_p = true;
2345 return;
2346 }
2347 lra_reg_spill_p = false;
2348}
2349
2350/* True if the current function is too big to use regular algorithms
2351 in LRA. In other words, we should use simpler and faster algorithms
2352 in LRA. It also means we should not worry about generation code
2353 for caller saves. The value is set up in IRA. */
2354bool lra_simple_p;
2355
2356/* Major LRA entry function. F is a file should be used to dump LRA
1390bf52 2357 debug info with given verbosity. */
55a2c322 2358void
1390bf52 2359lra (FILE *f, int verbose)
55a2c322
VM
2360{
2361 int i;
15961e4a 2362 bool live_p, inserted_p;
55a2c322
VM
2363
2364 lra_dump_file = f;
1390bf52 2365 lra_verbose = verbose;
11067dee 2366 lra_asm_error_p = false;
68ba1039 2367 lra_pmode_pseudo = gen_reg_rtx (Pmode);
11067dee 2368
55a2c322
VM
2369 timevar_push (TV_LRA);
2370
2c62cbaa
VM
2371 /* Make sure that the last insn is a note. Some subsequent passes
2372 need it. */
2373 emit_note (NOTE_INSN_DELETED);
2374
6576d245 2375 lra_no_alloc_regs = ira_no_alloc_regs;
b28ece32 2376
afa22e29
VM
2377 init_reg_info ();
2378 expand_reg_info ();
2379
55a2c322
VM
2380 init_insn_recog_data ();
2381
49f5efa5 2382 /* Some quick check on RTL generated by previous passes. */
b2b29377
MM
2383 if (flag_checking)
2384 check_rtl (false);
55a2c322 2385
0c8ecbcd 2386 lra_in_progress = true;
49f5efa5 2387
f54437d5
VM
2388 lra_live_range_iter = lra_coalesce_iter = lra_constraint_iter = 0;
2389 lra_assignment_iter = lra_assignment_iter_after_spill = 0;
55a2c322 2390 lra_inheritance_iter = lra_undo_inheritance_iter = 0;
94446928 2391 lra_rematerialization_iter = 0;
55a2c322
VM
2392
2393 setup_reg_spill_flag ();
2394
55a2c322
VM
2395 /* Function remove_scratches can creates new pseudos for clobbers --
2396 so set up lra_constraint_new_regno_start before its call to
2397 permit changing reg classes for pseudos created by this
2398 simplification. */
f681cf95 2399 lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
8fd827b8 2400 lra_bad_spill_regno_start = INT_MAX;
55a2c322 2401 remove_scratches ();
55a2c322
VM
2402
2403 /* A function that has a non-local label that can reach the exit
2404 block via non-exceptional paths must save all call-saved
2405 registers. */
2406 if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
2407 crtl->saves_all_registers = 1;
2408
2409 if (crtl->saves_all_registers)
2410 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
a1e6ee38
RS
2411 if (!crtl->abi->clobbers_full_reg_p (i)
2412 && !fixed_regs[i]
2413 && !LOCAL_REGNO (i))
55a2c322
VM
2414 df_set_regs_ever_live (i, true);
2415
2416 /* We don't DF from now and avoid its using because it is to
2417 expensive when a lot of RTL changes are made. */
2418 df_set_flags (DF_NO_INSN_RESCAN);
9771b263 2419 lra_constraint_insn_stack.create (get_max_uid ());
55a2c322 2420 lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
f61e445a 2421 bitmap_clear (lra_constraint_insn_stack_bitmap);
55a2c322
VM
2422 lra_live_ranges_init ();
2423 lra_constraints_init ();
2424 lra_curr_reload_num = 0;
cfa434f6 2425 push_insns (get_last_insn (), NULL);
55a2c322 2426 /* It is needed for the 1st coalescing. */
55a2c322
VM
2427 bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
2428 bitmap_initialize (&lra_split_regs, &reg_obstack);
2429 bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
2b778c9d 2430 bitmap_initialize (&lra_subreg_reload_pseudos, &reg_obstack);
55a2c322 2431 live_p = false;
f075bd95 2432 if (maybe_ne (get_frame_size (), 0) && crtl->stack_alignment_needed)
2c62cbaa
VM
2433 /* If we have a stack frame, we must align it now. The stack size
2434 may be a part of the offset computation for register
2435 elimination. */
2436 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
4c2b2d79 2437 lra_init_equiv ();
55a2c322
VM
2438 for (;;)
2439 {
2440 for (;;)
2441 {
0a045a1f 2442 bool reloads_p = lra_constraints (lra_constraint_iter == 0);
55a2c322
VM
2443 /* Constraint transformations may result in that eliminable
2444 hard regs become uneliminable and pseudos which use them
2445 should be spilled. It is better to do it before pseudo
2446 assignments.
2447
2448 For example, rs6000 can make
2449 RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
2450 to use a constant pool. */
8d49e7ef 2451 lra_eliminate (false, false);
0a045a1f
VM
2452 /* We should try to assign hard registers to scratches even
2453 if there were no RTL transformations in lra_constraints.
2454 Also we should check IRA assignments on the first
2455 iteration as they can be wrong because of early clobbers
2456 operands which are ignored in IRA. */
2457 if (! reloads_p && lra_constraint_iter > 1)
2458 {
2459 /* Stack is not empty here only when there are changes
2460 during the elimination sub-pass. */
2461 if (bitmap_empty_p (lra_constraint_insn_stack_bitmap))
2462 break;
2463 else
2464 /* If there are no reloads but changing due
2465 elimination, restart the constraint sub-pass
2466 first. */
2467 continue;
2468 }
55a2c322
VM
2469 /* Do inheritance only for regular algorithms. */
2470 if (! lra_simple_p)
a1e6ee38 2471 lra_inheritance ();
4ccf8f43
JJ
2472 if (live_p)
2473 lra_clear_live_ranges ();
6027ea4c 2474 bool fails_p;
15a47f43 2475 lra_hard_reg_split_p = false;
6027ea4c 2476 do
55a2c322 2477 {
6027ea4c
VM
2478 /* We need live ranges for lra_assign -- so build them.
2479 But don't remove dead insns or change global live
2480 info as we can undo inheritance transformations after
2481 inheritance pseudo assigning. */
12981e9b 2482 lra_create_live_ranges (true, !lra_simple_p);
6027ea4c
VM
2483 live_p = true;
2484 /* If we don't spill non-reload and non-inheritance
2485 pseudos, there is no sense to run memory-memory move
2486 coalescing. If inheritance pseudos were spilled, the
2487 memory-memory moves involving them will be removed by
2488 pass undoing inheritance. */
2489 if (lra_simple_p)
2490 lra_assign (fails_p);
2491 else
72ea0d47 2492 {
6027ea4c
VM
2493 bool spill_p = !lra_assign (fails_p);
2494
2495 if (lra_undo_inheritance ())
2496 live_p = false;
2497 if (spill_p && ! fails_p)
72ea0d47 2498 {
6027ea4c
VM
2499 if (! live_p)
2500 {
2501 lra_create_live_ranges (true, true);
2502 live_p = true;
2503 }
2504 if (lra_coalesce ())
2505 live_p = false;
72ea0d47 2506 }
6027ea4c
VM
2507 if (! live_p)
2508 lra_clear_live_ranges ();
2509 }
2510 if (fails_p)
2511 {
2512 /* It is a very rare case. It is the last hope to
2513 split a hard regno live range for a reload
2514 pseudo. */
2515 if (live_p)
2516 lra_clear_live_ranges ();
2517 live_p = false;
2518 if (! lra_split_hard_reg_for ())
2519 break;
15a47f43 2520 lra_hard_reg_split_p = true;
72ea0d47 2521 }
55a2c322 2522 }
81d762cb 2523 while (fails_p && !lra_asm_error_p);
12981e9b
VM
2524 if (! live_p) {
2525 /* We need the correct reg notes for work of constraint sub-pass. */
2526 lra_create_live_ranges (true, true);
2527 live_p = true;
2528 }
55a2c322 2529 }
080cbf9e
VM
2530 /* Don't clear optional reloads bitmap until all constraints are
2531 satisfied as we need to differ them from regular reloads. */
2532 bitmap_clear (&lra_optional_reload_pseudos);
2b778c9d 2533 bitmap_clear (&lra_subreg_reload_pseudos);
55a2c322
VM
2534 bitmap_clear (&lra_inheritance_pseudos);
2535 bitmap_clear (&lra_split_regs);
55a2c322
VM
2536 if (! live_p)
2537 {
2538 /* We need full live info for spilling pseudos into
2539 registers instead of memory. */
4ab74a01 2540 lra_create_live_ranges (lra_reg_spill_p, true);
55a2c322
VM
2541 live_p = true;
2542 }
4ab74a01
VM
2543 /* We should check necessity for spilling here as the above live
2544 range pass can remove spilled pseudos. */
2545 if (! lra_need_for_spills_p ())
2546 break;
d9cf932c
VM
2547 /* Now we know what pseudos should be spilled. Try to
2548 rematerialize them first. */
f607c5c4 2549 if (lra_remat ())
d9cf932c
VM
2550 {
2551 /* We need full live info -- see the comment above. */
4ab74a01 2552 lra_create_live_ranges (lra_reg_spill_p, true);
d9cf932c
VM
2553 live_p = true;
2554 if (! lra_need_for_spills_p ())
23e0f4c3
BE
2555 {
2556 if (lra_need_for_scratch_reg_p ())
2557 continue;
2558 break;
2559 }
d9cf932c 2560 }
55a2c322
VM
2561 lra_spill ();
2562 /* Assignment of stack slots changes elimination offsets for
2563 some eliminations. So update the offsets here. */
8d49e7ef 2564 lra_eliminate (false, false);
8fd827b8
VM
2565 lra_constraint_new_regno_start = max_reg_num ();
2566 if (lra_bad_spill_regno_start == INT_MAX
2567 && lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES
2568 && lra_rematerialization_iter > LRA_MAX_REMATERIALIZATION_PASSES)
2569 /* After switching off inheritance and rematerialization
2570 passes, avoid spilling reload pseudos will be created to
2571 prevent LRA cycling in some complicated cases. */
2572 lra_bad_spill_regno_start = lra_constraint_new_regno_start;
f54437d5 2573 lra_assignment_iter_after_spill = 0;
55a2c322 2574 }
44fbc9c6 2575 ira_restore_scratches (lra_dump_file);
8d49e7ef 2576 lra_eliminate (true, false);
c5cd5a7e 2577 lra_final_code_change ();
0c8ecbcd 2578 lra_in_progress = false;
4ccf8f43
JJ
2579 if (live_p)
2580 lra_clear_live_ranges ();
55a2c322
VM
2581 lra_live_ranges_finish ();
2582 lra_constraints_finish ();
2583 finish_reg_info ();
2584 sbitmap_free (lra_constraint_insn_stack_bitmap);
9771b263 2585 lra_constraint_insn_stack.release ();
55a2c322
VM
2586 finish_insn_recog_data ();
2587 regstat_free_n_sets_and_refs ();
2588 regstat_free_ri ();
2589 reload_completed = 1;
2590 update_inc_notes ();
2591
2592 inserted_p = fixup_abnormal_edges ();
2593
2594 /* We've possibly turned single trapping insn into multiple ones. */
2595 if (cfun->can_throw_non_call_exceptions)
2596 {
7ba9e72d 2597 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
f61e445a 2598 bitmap_ones (blocks);
55a2c322 2599 find_many_sub_basic_blocks (blocks);
55a2c322
VM
2600 }
2601
2602 if (inserted_p)
2603 commit_edge_insertions ();
2604
3c23defe
VM
2605 /* Subsequent passes expect that rtl is unshared, so unshare everything
2606 here. */
55a2c322
VM
2607 unshare_all_rtl_again (get_insns ());
2608
b2b29377
MM
2609 if (flag_checking)
2610 check_rtl (true);
55a2c322
VM
2611
2612 timevar_pop (TV_LRA);
2613}
2614
2615/* Called once per compiler to initialize LRA data once. */
2616void
2617lra_init_once (void)
2618{
2619 init_insn_code_data_once ();
2620}
2621
55a2c322
VM
2622/* Called once per compiler to finish LRA data which are initialize
2623 once. */
2624void
2625lra_finish_once (void)
2626{
2627 finish_insn_code_data_once ();
2628}