]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/lra.c
Daily bump.
[thirdparty/gcc.git] / gcc / lra.c
CommitLineData
55a2c322 1/* LRA (local register allocator) driver and LRA utilities.
5624e564 2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
55a2c322
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22/* The Local Register Allocator (LRA) is a replacement of former
23 reload pass. It is focused to simplify code solving the reload
24 pass tasks, to make the code maintenance easier, and to implement new
25 perspective optimizations.
26
27 The major LRA design solutions are:
 28 o division into small, manageable, separate sub-tasks
 29 o reflection of all transformations and decisions in RTL as much
 30 as possible
31 o insn constraints as a primary source of the info (minimizing
32 number of target-depended macros/hooks)
33
34 In brief LRA works by iterative insn process with the final goal is
35 to satisfy all insn and address constraints:
36 o New reload insns (in brief reloads) and reload pseudos might be
37 generated;
38 o Some pseudos might be spilled to assign hard registers to
39 new reload pseudos;
d9cf932c 40 o Recalculating spilled pseudo values (rematerialization);
55a2c322
VM
41 o Changing spilled pseudos to stack memory or their equivalences;
42 o Allocation stack memory changes the address displacement and
43 new iteration is needed.
44
45 Here is block diagram of LRA passes:
46
2b778c9d
VM
47 ------------------------
48 --------------- | Undo inheritance for | ---------------
49 | Memory-memory | | spilled pseudos, | | New (and old) |
50 | move coalesce |<---| splits for pseudos got |<-- | pseudos |
51 --------------- | the same hard regs, | | assignment |
52 Start | | and optional reloads | ---------------
53 | | ------------------------ ^
f38e1b0a
VM
54 V | ---------------- |
55 ----------- V | Update virtual | |
56| Remove |----> ------------>| register | |
57| scratches | ^ | displacements | |
58 ----------- | ---------------- |
59 | | |
60 | V New |
d9cf932c
VM
61 | ------------ pseudos -------------------
62 | |Constraints:| or insns | Inheritance/split |
63 | | RTL |--------->| transformations |
64 | | transfor- | | in EBB scope |
65 | substi- | mations | -------------------
66 | tutions ------------
67 | | No change
68 ---------------- V
69 | Spilled pseudo | -------------------
70 | to memory |<----| Rematerialization |
71 | substitution | -------------------
72 ----------------
 73 | No substitutions
74 V
75 -------------------------
76 | Hard regs substitution, |
 77 | devirtualization, and |------> Finish
78 | restoring scratches got |
79 | memory |
80 -------------------------
55a2c322
VM
81
82 To speed up the process:
83 o We process only insns affected by changes on previous
84 iterations;
85 o We don't use DFA-infrastructure because it results in much slower
86 compiler speed than a special IR described below does;
87 o We use a special insn representation for quick access to insn
88 info which is always *synchronized* with the current RTL;
89 o Insn IR is minimized by memory. It is divided on three parts:
90 o one specific for each insn in RTL (only operand locations);
91 o one common for all insns in RTL with the same insn code
92 (different operand attributes from machine descriptions);
93 o one oriented for maintenance of live info (list of pseudos).
94 o Pseudo data:
95 o all insns where the pseudo is referenced;
96 o live info (conflicting hard regs, live ranges, # of
97 references etc);
98 o data used for assigning (preferred hard regs, costs etc).
99
100 This file contains LRA driver, LRA utility functions and data, and
101 code for dealing with scratches. */
102
103#include "config.h"
104#include "system.h"
105#include "coretypes.h"
106#include "tm.h"
b28ece32 107#include "hard-reg-set.h"
55a2c322
VM
108#include "rtl.h"
109#include "tm_p.h"
110#include "regs.h"
111#include "insn-config.h"
112#include "insn-codes.h"
113#include "recog.h"
114#include "output.h"
115#include "addresses.h"
55a2c322
VM
116#include "flags.h"
117#include "function.h"
40e23961 118#include "symtab.h"
40e23961 119#include "tree.h"
b0710fe1 120#include "optabs.h"
36566b39
PK
121#include "alias.h"
122#include "expmed.h"
123#include "dojump.h"
124#include "explow.h"
125#include "calls.h"
126#include "emit-rtl.h"
127#include "varasm.h"
128#include "stmt.h"
55a2c322 129#include "expr.h"
60393bbc
AM
130#include "predict.h"
131#include "dominance.h"
132#include "cfg.h"
133#include "cfgrtl.h"
134#include "cfgbuild.h"
55a2c322
VM
135#include "basic-block.h"
136#include "except.h"
137#include "tree-pass.h"
138#include "timevar.h"
139#include "target.h"
55a2c322 140#include "ira.h"
cb8abb1c 141#include "alloc-pool.h"
55a2c322
VM
142#include "lra-int.h"
143#include "df.h"
144
8160cd3e
VM
145/* Dump bitmap SET with TITLE and BB INDEX. */
146void
147lra_dump_bitmap_with_title (const char *title, bitmap set, int index)
148{
149 unsigned int i;
150 int count;
151 bitmap_iterator bi;
152 static const int max_nums_on_line = 10;
153
154 if (bitmap_empty_p (set))
155 return;
156 fprintf (lra_dump_file, " %s %d:", title, index);
157 fprintf (lra_dump_file, "\n");
158 count = max_nums_on_line + 1;
159 EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
160 {
161 if (count > max_nums_on_line)
162 {
163 fprintf (lra_dump_file, "\n ");
164 count = 0;
165 }
166 fprintf (lra_dump_file, " %4u", i);
167 count++;
168 }
169 fprintf (lra_dump_file, "\n");
170}
171
55a2c322
VM
172/* Hard registers currently not available for allocation. It can
 173 be changed after some hard registers become not eliminable. */
174HARD_REG_SET lra_no_alloc_regs;
175
176static int get_new_reg_value (void);
177static void expand_reg_info (void);
178static void invalidate_insn_recog_data (int);
cfa434f6
DM
179static int get_insn_freq (rtx_insn *);
180static void invalidate_insn_data_regno_info (lra_insn_recog_data_t,
181 rtx_insn *, int);
55a2c322
VM
182
/* Expand all regno related info needed for LRA.  OLD is the previous
   value of max_reg_num (); only pseudos created at or after OLD are
   (re)initialized.  */
static void
expand_reg_data (int old)
{
  resize_reg_info ();
  expand_reg_info ();
  ira_expand_reg_equiv ();
  /* Give each newly created pseudo the most general class; later
     passes narrow it from the insn constraints.  */
  for (int i = (int) max_reg_num () - 1; i >= old; i--)
    lra_change_class (i, ALL_REGS, " Set", true);
}
193
/* Create and return a new reg of ORIGINAL mode.  If ORIGINAL is NULL
   or of VOIDmode, use MD_MODE for the new reg.  Initialize its
   register class to RCLASS.  Print message about assigning class
   RCLASS containing new register name TITLE unless it is NULL.  Use
   attributes of ORIGINAL if it is a register.  The created register
   will have unique held value.  */
rtx
lra_create_new_reg_with_unique_value (machine_mode md_mode, rtx original,
				      enum reg_class rclass, const char *title)
{
  machine_mode mode;
  rtx new_reg;

  if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
    mode = md_mode;
  lra_assert (mode != VOIDmode);
  new_reg = gen_reg_rtx (mode);
  if (original == NULL_RTX || ! REG_P (original))
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Creating newreg=%i", REGNO (new_reg));
    }
  else
    {
      /* Copy register attributes from ORIGINAL to the new pseudo.  */
      if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
	ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
      REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
      REG_POINTER (new_reg) = REG_POINTER (original);
      REG_ATTRS (new_reg) = REG_ATTRS (original);
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Creating newreg=%i from oldreg=%i",
		 REGNO (new_reg), REGNO (original));
    }
  if (lra_dump_file != NULL)
    {
      if (title != NULL)
	fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
		 reg_class_names[rclass], *title == '\0' ? "" : " ",
		 title, REGNO (new_reg));
      fprintf (lra_dump_file, "\n");
    }
  /* gen_reg_rtx created a pseudo -- make the LRA per-regno tables
     cover it.  */
  expand_reg_data (max_reg_num ());
  setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
  return new_reg;
}
239
240/* Analogous to the previous function but also inherits value of
241 ORIGINAL. */
242rtx
ef4bddc2 243lra_create_new_reg (machine_mode md_mode, rtx original,
55a2c322
VM
244 enum reg_class rclass, const char *title)
245{
246 rtx new_reg;
247
248 new_reg
249 = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
250 if (original != NULL_RTX && REG_P (original))
d70a81dd 251 lra_assign_reg_val (REGNO (original), REGNO (new_reg));
55a2c322
VM
252 return new_reg;
253}
254
/* Set up for REGNO unique hold value, i.e. a value shared with no
   other pseudo.  */
void
lra_set_regno_unique_value (int regno)
{
  lra_reg_info[regno].val = get_new_reg_value ();
}
261
8d49e7ef
VM
/* Invalidate INSN related info used by LRA.  The info should never be
   used after that.  */
void
lra_invalidate_insn_data (rtx_insn *insn)
{
  lra_invalidate_insn_regno_info (insn);
  invalidate_insn_recog_data (INSN_UID (insn));
}
270
/* Mark INSN deleted and invalidate the insn related info used by
   LRA.  */
void
lra_set_insn_deleted (rtx_insn *insn)
{
  /* Drop the LRA info first: it must not outlive the insn.  */
  lra_invalidate_insn_data (insn);
  SET_INSN_DELETED (insn);
}
279
/* Delete an unneeded INSN and any previous insns whose sole purpose is
   loading data that is dead in INSN.  */
void
lra_delete_dead_insn (rtx_insn *insn)
{
  rtx_insn *prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn,
     delete it too.  Recurse so a whole chain of dead loads is
     removed.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    lra_delete_dead_insn (prev);

  lra_set_insn_deleted (insn);
}
299
11f5b71a
VM
/* Emit insn x = y + z.  Return NULL if we failed to do it.
   Otherwise, return the insn.  We don't use gen_add3_insn as it might
   clobber CC.  */
static rtx_insn *
emit_add3_insn (rtx x, rtx y, rtx z)
{
  rtx_insn *last;

  /* Remember the insertion point so a failed attempt can be undone.  */
  last = get_last_insn ();

  if (have_addptr3_insn (x, y, z))
    {
      rtx_insn *insn = gen_addptr3_insn (x, y, z);

      /* If the target provides an "addptr" pattern it hopefully does
	 for a reason.  So falling back to the normal add would be
	 a bug.  */
      lra_assert (insn != NULL_RTX);
      emit_insn (insn);
      return insn;
    }

  rtx_insn *insn = emit_insn (gen_rtx_SET (x, gen_rtx_PLUS (GET_MODE (y),
							    y, z)));
  if (recog_memoized (insn) < 0)
    {
      /* The target does not recognize the plain add -- remove it and
	 report failure to the caller.  */
      delete_insns_since (last);
      insn = NULL;
    }
  return insn;
}
331
/* Emit insn x = x + y.  Return the insn.  We use gen_add2_insn as the
   last resort.  */
static rtx_insn *
emit_add2_insn (rtx x, rtx y)
{
  /* Prefer the 3-operand form with X duplicated; it avoids patterns
     that clobber CC (see emit_add3_insn).  */
  rtx_insn *insn = emit_add3_insn (x, x, y);
  if (insn == NULL_RTX)
    {
      insn = gen_add2_insn (x, y);
      if (insn != NULL_RTX)
	emit_insn (insn);
    }
  return insn;
}
346
55a2c322
VM
/* Target checks operands through operand predicates to recognize an
   insn.  We should have a special precaution to generate add insns
   which are frequent results of elimination.

   Emit insns for x = y + z.  X can be used to store intermediate
   values and should be not in Y and Z when we use X to store an
   intermediate value.  Y + Z should form [base] [+ index[ * scale]] [
   + disp] where base and index are registers, disp and scale are
   constants.  Y should contain base if it is present, Z should
   contain disp if any.  index[*scale] can be part of Y or Z.  */
void
lra_emit_add (rtx x, rtx y, rtx z)
{
  int old;
  rtx_insn *last;
  rtx a1, a2, base, index, disp, scale, index_scale;
  bool ok_p;

  /* First try the straightforward 3-operand add.  */
  rtx_insn *add3_insn = emit_add3_insn (x, y, z);
  old = max_reg_num ();
  if (add3_insn != NULL)
    ;
  else
    {
      /* Decompose Y + Z into base, index, scale and displacement
	 parts.  */
      disp = a2 = NULL_RTX;
      if (GET_CODE (y) == PLUS)
	{
	  a1 = XEXP (y, 0);
	  a2 = XEXP (y, 1);
	  disp = z;
	}
      else
	{
	  a1 = y;
	  if (CONSTANT_P (z))
	    disp = z;
	  else
	    a2 = z;
	}
      index_scale = scale = NULL_RTX;
      if (GET_CODE (a1) == MULT)
	{
	  index_scale = a1;
	  index = XEXP (a1, 0);
	  scale = XEXP (a1, 1);
	  base = a2;
	}
      else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
	{
	  index_scale = a2;
	  index = XEXP (a2, 0);
	  scale = XEXP (a2, 1);
	  base = a1;
	}
      else
	{
	  base = a1;
	  index = a2;
	}
      if (! (REG_P (base) || GET_CODE (base) == SUBREG)
	  || (index != NULL_RTX
	      && ! (REG_P (index) || GET_CODE (index) == SUBREG))
	  || (disp != NULL_RTX && ! CONSTANT_P (disp))
	  || (scale != NULL_RTX && ! CONSTANT_P (scale)))
	{
	  /* Probably we have no 3 op add.  Last chance is to use 2-op
	     add insn.  To succeed, don't move Z to X as an address
	     segment always comes in Y.  Otherwise, we might fail when
	     adding the address segment to register.  */
	  lra_assert (x != y && x != z);
	  emit_move_insn (x, y);
	  rtx_insn *insn = emit_add2_insn (x, z);
	  lra_assert (insn != NULL_RTX);
	}
      else
	{
	  if (index_scale == NULL_RTX)
	    index_scale = index;
	  if (disp == NULL_RTX)
	    {
	      /* Generate x = index_scale; x = x + base.  */
	      lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
	      emit_move_insn (x, index_scale);
	      rtx_insn *insn = emit_add2_insn (x, base);
	      lra_assert (insn != NULL_RTX);
	    }
	  else if (scale == NULL_RTX)
	    {
	      /* Try x = base + disp.  */
	      lra_assert (base != NULL_RTX);
	      last = get_last_insn ();
	      rtx_insn *move_insn =
		emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base), base, disp));
	      if (recog_memoized (move_insn) < 0)
		{
		  /* The target rejected the combined move -- undo it
		     and split into two insns.  */
		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base.  */
		  emit_move_insn (x, disp);
		  rtx_insn *add2_insn = emit_add2_insn (x, base);
		  lra_assert (add2_insn != NULL_RTX);
		}
	      /* Generate x = x + index.  */
	      if (index != NULL_RTX)
		{
		  rtx_insn *insn = emit_add2_insn (x, index);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	  else
	    {
	      /* Try x = index_scale; x = x + disp; x = x + base.  */
	      last = get_last_insn ();
	      rtx_insn *move_insn = emit_move_insn (x, index_scale);
	      ok_p = false;
	      if (recog_memoized (move_insn) >= 0)
		{
		  rtx_insn *insn = emit_add2_insn (x, disp);
		  if (insn != NULL_RTX)
		    {
		      insn = emit_add2_insn (x, base);
		      if (insn != NULL_RTX)
			ok_p = true;
		    }
		}
	      if (! ok_p)
		{
		  /* Roll back and use the fallback ordering.  */
		  delete_insns_since (last);
		  /* Generate x = disp; x = x + base; x = x + index_scale.  */
		  emit_move_insn (x, disp);
		  rtx_insn *insn = emit_add2_insn (x, base);
		  lra_assert (insn != NULL_RTX);
		  insn = emit_add2_insn (x, index_scale);
		  lra_assert (insn != NULL_RTX);
		}
	    }
	}
    }
  /* Functions emit_... can create pseudos -- so expand the pseudo
     data.  */
  if (old != max_reg_num ())
    expand_reg_data (old);
}
489
490/* The number of emitted reload insns so far. */
491int lra_curr_reload_num;
492
/* Emit x := y, processing special case when y = u + v or y = u + v *
   scale + w through emit_add (Y can be an address which is base +
   index reg * scale + displacement in general case).  X may be used
   as intermediate result therefore it should be not in Y.  */
void
lra_emit_move (rtx x, rtx y)
{
  int old;

  if (GET_CODE (y) != PLUS)
    {
      if (rtx_equal_p (x, y))
	return;
      old = max_reg_num ();
      emit_move_insn (x, y);
      /* Record when this pseudo was last reloaded.  */
      if (REG_P (x))
	lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
      /* Function emit_move can create pseudos -- so expand the pseudo
	 data.  */
      if (old != max_reg_num ())
	expand_reg_data (old);
      return;
    }
  /* Y is an address-like sum -- let lra_emit_add pick legal insns.  */
  lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
}
518
519/* Update insn operands which are duplication of operands whose
520 numbers are in array of NOPS (with end marker -1). The insn is
521 represented by its LRA internal representation ID. */
522void
523lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
524{
525 int i, j, nop;
526 struct lra_static_insn_data *static_id = id->insn_static_data;
527
528 for (i = 0; i < static_id->n_dups; i++)
529 for (j = 0; (nop = nops[j]) >= 0; j++)
530 if (static_id->dup_num[i] == nop)
531 *id->dup_loc[i] = *id->operand_loc[nop];
532}
533
534\f
535
536/* This page contains code dealing with info about registers in the
537 insns. */
538
539/* Pools for insn reg info. */
2f77a607 540pool_allocator<lra_insn_reg> lra_insn_reg::pool ("insn regs", 100);
55a2c322 541
c34c46dd
RS
/* Create LRA insn related info about a reference to REGNO in INSN with
   TYPE (in/out/inout), biggest reference mode MODE, flag that it is
   reference through subreg (SUBREG_P), flag that is early clobbered
   in the insn (EARLY_CLOBBER), and reference to the next insn reg
   info (NEXT).  */
static struct lra_insn_reg *
new_insn_reg (rtx_insn *insn, int regno, enum op_type type,
	      machine_mode mode,
	      bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
{
  lra_insn_reg *ir = new lra_insn_reg ();
  ir->type = type;
  ir->biggest_mode = mode;
  /* Track the widest mode in which REGNO is referenced; debug insns
     are ignored so -g does not influence the recorded mode.  */
  if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode)
      && NONDEBUG_INSN_P (insn))
    lra_reg_info[regno].biggest_mode = mode;
  ir->subreg_p = subreg_p;
  ir->early_clobber = early_clobber;
  ir->regno = regno;
  ir->next = next;
  return ir;
}
564
55a2c322
VM
565/* Free insn reg info list IR. */
566static void
567free_insn_regs (struct lra_insn_reg *ir)
568{
569 struct lra_insn_reg *next_ir;
570
571 for (; ir != NULL; ir = next_ir)
572 {
573 next_ir = ir->next;
2f77a607 574 delete ir;
55a2c322
VM
575 }
576}
577
/* Finish pool for insn reg info.  All nodes allocated from the pool
   become invalid after this.  */
static void
finish_insn_regs (void)
{
  lra_insn_reg::pool.release ();
}
584
585\f
586
587/* This page contains code dealing LRA insn info (or in other words
588 LRA internal insn representation). */
589
55a2c322
VM
590/* Map INSN_CODE -> the static insn data. This info is valid during
591 all translation unit. */
592struct lra_static_insn_data *insn_code_data[LAST_INSN_CODE];
593
594/* Debug insns are represented as a special insn with one input
595 operand which is RTL expression in var_location. */
596
597/* The following data are used as static insn operand data for all
598 debug insns. If structure lra_operand_data is changed, the
599 initializer should be changed too. */
600static struct lra_operand_data debug_operand_data =
601 {
602 NULL, /* alternative */
603 VOIDmode, /* We are not interesting in the operand mode. */
604 OP_IN,
605 0, 0, 0, 0
606 };
607
608/* The following data are used as static insn data for all debug
609 insns. If structure lra_static_insn_data is changed, the
610 initializer should be changed too. */
611static struct lra_static_insn_data debug_insn_static_data =
612 {
613 &debug_operand_data,
614 0, /* Duplication operands #. */
615 -1, /* Commutative operand #. */
616 1, /* Operands #. There is only one operand which is debug RTL
617 expression. */
618 0, /* Duplications #. */
619 0, /* Alternatives #. We are not interesting in alternatives
620 because we does not proceed debug_insns for reloads. */
621 NULL, /* Hard registers referenced in machine description. */
622 NULL /* Descriptions of operands in alternatives. */
623 };
624
/* Called once per compiler work to initialize some LRA data related
   to insns.  Clears the INSN_CODE -> static insn data map.  */
static void
init_insn_code_data_once (void)
{
  memset (insn_code_data, 0, sizeof (insn_code_data));
}
632
633/* Called once per compiler work to finalize some LRA data related to
634 insns. */
635static void
636finish_insn_code_data_once (void)
637{
638 int i;
639
640 for (i = 0; i < LAST_INSN_CODE; i++)
641 {
642 if (insn_code_data[i] != NULL)
643 free (insn_code_data[i]);
55a2c322
VM
644 }
645}
646
55a2c322
VM
/* Return static insn data, allocate and setup if necessary.  Although
   dup_num is static data (it depends only on icode), to set it up we
   need to extract insn first.  So recog_data should be valid for
   normal insn (ICODE >= 0) before the call.  NOP, NDUP and NALT are
   the numbers of operands, duplications and alternatives.  */
static struct lra_static_insn_data *
get_static_insn_data (int icode, int nop, int ndup, int nalt)
{
  struct lra_static_insn_data *data;
  size_t n_bytes;

  lra_assert (icode < LAST_INSN_CODE);
  if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
    return data;
  lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
  /* One allocation holds the header, the operand array and the
     dup_num array, laid out back to back.  */
  n_bytes = sizeof (struct lra_static_insn_data)
    + sizeof (struct lra_operand_data) * nop
    + sizeof (int) * ndup;
  data = XNEWVAR (struct lra_static_insn_data, n_bytes);
  data->operand_alternative = NULL;
  data->n_operands = nop;
  data->n_dups = ndup;
  data->n_alternatives = nalt;
  data->operand = ((struct lra_operand_data *)
		   ((char *) data + sizeof (struct lra_static_insn_data)));
  data->dup_num = ((int *) ((char *) data->operand
			    + sizeof (struct lra_operand_data) * nop));
  if (icode >= 0)
    {
      int i;

      /* Cache the data so later calls with the same icode reuse it.  */
      insn_code_data[icode] = data;
      for (i = 0; i < nop; i++)
	{
	  data->operand[i].constraint
	    = insn_data[icode].operand[i].constraint;
	  data->operand[i].mode = insn_data[icode].operand[i].mode;
	  data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
	  data->operand[i].is_operator
	    = insn_data[icode].operand[i].is_operator;
	  /* Derive in/out/inout from the leading constraint char.  */
	  data->operand[i].type
	    = (data->operand[i].constraint[0] == '=' ? OP_OUT
	       : data->operand[i].constraint[0] == '+' ? OP_INOUT
	       : OP_IN);
	  data->operand[i].is_address = false;
	}
      for (i = 0; i < ndup; i++)
	data->dup_num[i] = recog_data.dup_num[i];
    }
  return data;
}
697
698/* The current length of the following array. */
699int lra_insn_recog_data_len;
700
701/* Map INSN_UID -> the insn recog data (NULL if unknown). */
702lra_insn_recog_data_t *lra_insn_recog_data;
703
/* Initialize LRA data about insns.  The map is grown lazily by
   check_and_expand_insn_recog_data.  */
static void
init_insn_recog_data (void)
{
  lra_insn_recog_data_len = 0;
  lra_insn_recog_data = NULL;
}
711
712/* Expand, if necessary, LRA data about insns. */
713static void
714check_and_expand_insn_recog_data (int index)
715{
716 int i, old;
717
718 if (lra_insn_recog_data_len > index)
719 return;
720 old = lra_insn_recog_data_len;
721 lra_insn_recog_data_len = index * 3 / 2 + 1;
722 lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
723 lra_insn_recog_data,
724 lra_insn_recog_data_len);
725 for (i = old; i < lra_insn_recog_data_len; i++)
726 lra_insn_recog_data[i] = NULL;
727}
728
729/* Finish LRA DATA about insn. */
730static void
731free_insn_recog_data (lra_insn_recog_data_t data)
732{
733 if (data->operand_loc != NULL)
734 free (data->operand_loc);
735 if (data->dup_loc != NULL)
736 free (data->dup_loc);
737 if (data->arg_hard_regs != NULL)
738 free (data->arg_hard_regs);
55a2c322
VM
739 if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
740 {
741 if (data->insn_static_data->operand_alternative != NULL)
0c331756
RS
742 free (const_cast <operand_alternative *>
743 (data->insn_static_data->operand_alternative));
55a2c322
VM
744 free_insn_regs (data->insn_static_data->hard_regs);
745 free (data->insn_static_data);
746 }
747 free_insn_regs (data->regs);
748 data->regs = NULL;
749 free (data);
750}
751
/* Finish LRA data about all insns.  */
static void
finish_insn_recog_data (void)
{
  int i;
  lra_insn_recog_data_t data;

  for (i = 0; i < lra_insn_recog_data_len; i++)
    if ((data = lra_insn_recog_data[i]) != NULL)
      free_insn_recog_data (data);
  finish_insn_regs ();
  /* Release the object pools only after every per-insn structure
     referencing pool nodes has been freed.  */
  lra_copy::pool.release ();
  lra_insn_reg::pool.release ();
  free (lra_insn_recog_data);
}
767
/* Setup info about operands in alternatives of LRA DATA of insn,
   taking per-alternative flags from the preprocessed OP_ALT array
   (NALT * NOP entries, alternative-major order).  */
static void
setup_operand_alternative (lra_insn_recog_data_t data,
			   const operand_alternative *op_alt)
{
  int i, j, nop, nalt;
  int icode = data->icode;
  struct lra_static_insn_data *static_data = data->insn_static_data;

  static_data->commutative = -1;
  nop = static_data->n_operands;
  nalt = static_data->n_alternatives;
  static_data->operand_alternative = op_alt;
  for (i = 0; i < nop; i++)
    {
      static_data->operand[i].early_clobber = false;
      static_data->operand[i].is_address = false;
      if (static_data->operand[i].constraint[0] == '%')
	{
	  /* We currently only support one commutative pair of operands.  */
	  if (static_data->commutative < 0)
	    static_data->commutative = i;
	  else
	    lra_assert (icode < 0); /* Asm  */
	  /* The last operand should not be marked commutative.  */
	  lra_assert (i != nop - 1);
	}
    }
  /* An operand is early-clobber/address if it is so in ANY
     alternative.  OP_ALT advances through all NALT * NOP entries.  */
  for (j = 0; j < nalt; j++)
    for (i = 0; i < nop; i++, op_alt++)
      {
	static_data->operand[i].early_clobber |= op_alt->earlyclobber;
	static_data->operand[i].is_address |= op_alt->is_address;
      }
}
803
/* Recursively process X and collect info about registers, which are
   not the insn operands, in X with TYPE (in/out/inout) and flag that
   it is early clobbered in the insn (EARLY_CLOBBER) and add the info
   to LIST.  X is a part of insn given by DATA.  Return the result
   list.  */
static struct lra_insn_reg *
collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
			       struct lra_insn_reg *list,
			       enum op_type type, bool early_clobber)
{
  int i, j, regno, last;
  bool subreg_p;
  machine_mode mode;
  struct lra_insn_reg *curr;
  rtx op = *x;
  enum rtx_code code = GET_CODE (op);
  const char *fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < data->insn_static_data->n_operands; i++)
    if (x == data->operand_loc[i])
      /* It is an operand loc.  Stop here.  */
      return list;
  for (i = 0; i < data->insn_static_data->n_dups; i++)
    if (x == data->dup_loc[i])
      /* It is a dup loc.  Stop here.  */
      return list;
  mode = GET_MODE (op);
  subreg_p = false;
  if (code == SUBREG)
    {
      /* Look through the subreg but keep the wider of the two modes
	 as the reference mode.  */
      op = SUBREG_REG (op);
      code = GET_CODE (op);
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
	{
	  mode = GET_MODE (op);
	  if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
	    subreg_p = true;
	}
    }
  if (REG_P (op))
    {
      if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
	return list;
      /* Process all regs even unallocatable ones as we need info
	 about all regs for rematerialization pass.  */
      for (last = regno + hard_regno_nregs[regno][mode];
	   regno < last;
	   regno++)
	{
	  /* Try to merge into an existing entry for the same hard
	     reg before creating a new one.  */
	  for (curr = list; curr != NULL; curr = curr->next)
	    if (curr->regno == regno && curr->subreg_p == subreg_p
		&& curr->biggest_mode == mode)
	      {
		if (curr->type != type)
		  curr->type = OP_INOUT;
		if (curr->early_clobber != early_clobber)
		  curr->early_clobber = true;
		break;
	      }
	  if (curr == NULL)
	    {
	      /* This is a new hard regno or the info can not be
		 integrated into the found structure.  */
#ifdef STACK_REGS
	      early_clobber
		= (early_clobber
		   /* This clobber is to inform popping floating
		      point stack only.  */
		   && ! (FIRST_STACK_REG <= regno
			 && regno <= LAST_STACK_REG));
#endif
	      list = new_insn_reg (data->insn, regno, type, mode, subreg_p,
				   early_clobber, list);
	    }
	}
      return list;
    }
  switch (code)
    {
    case SET:
      list = collect_non_operand_hard_regs (&SET_DEST (op), data,
					    list, OP_OUT, false);
      list = collect_non_operand_hard_regs (&SET_SRC (op), data,
					    list, OP_IN, false);
      break;
    case CLOBBER:
      /* We treat clobber of non-operand hard registers as early
	 clobber (the behavior is expected from asm).  */
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_OUT, true);
      break;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_INOUT, false);
      break;
    case PRE_MODIFY: case POST_MODIFY:
      list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
					    list, OP_INOUT, false);
      list = collect_non_operand_hard_regs (&XEXP (op, 1), data,
					    list, OP_IN, false);
      break;
    default:
      /* Generic traversal: recurse into every sub-rtx and vector
	 element as an input.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    list = collect_non_operand_hard_regs (&XEXP (op, i), data,
						  list, OP_IN, false);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	      list = collect_non_operand_hard_regs (&XVECEXP (op, i, j), data,
						    list, OP_IN, false);
	}
    }
  return list;
}
920
/* Set up and return info about INSN.  Set up the info if it is not set up
   yet.  The caller guarantees the info slot for INSN's UID is currently
   empty; use lra_update_insn_recog_data to refresh existing info.  */
lra_insn_recog_data_t
lra_set_insn_recog_data (rtx_insn *insn)
{
  lra_insn_recog_data_t data;
  int i, n, icode;
  rtx **locs;
  unsigned int uid = INSN_UID (insn);
  struct lra_static_insn_data *insn_static_data;

  check_and_expand_insn_recog_data (uid);
  if (DEBUG_INSN_P (insn))
    /* Debug insns are never recognized; use -1 as their code.  */
    icode = -1;
  else
    {
      icode = INSN_CODE (insn);
      if (icode < 0)
	/* It might be a new simple insn which is not recognized yet.  */
	INSN_CODE (insn) = icode = recog_memoized (insn);
    }
  data = XNEW (struct lra_insn_recog_data);
  lra_insn_recog_data[uid] = data;
  data->insn = insn;
  data->used_insn_alternative = -1;
  data->icode = icode;
  data->regs = NULL;
  if (DEBUG_INSN_P (insn))
    {
      /* A debug insn has exactly one "operand": its location
	 expression.  All debug insns share one static descriptor.  */
      data->insn_static_data = &debug_insn_static_data;
      data->dup_loc = NULL;
      data->arg_hard_regs = NULL;
      data->preferred_alternatives = ALL_ALTERNATIVES;
      data->operand_loc = XNEWVEC (rtx *, 1);
      data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
      return data;
    }
  if (icode < 0)
    {
      /* Unrecognized insn: either an asm or a special pattern like
	 USE/CLOBBER.  */
      int nop, nalt;
      machine_mode operand_mode[MAX_RECOG_OPERANDS];
      const char *constraints[MAX_RECOG_OPERANDS];

      nop = asm_noperands (PATTERN (insn));
      data->operand_loc = data->dup_loc = NULL;
      nalt = 1;
      if (nop < 0)
	{
	  /* It is a special insn like USE or CLOBBER.  We should
	     recognize any regular insn otherwise LRA can do nothing
	     with this insn.  */
	  gcc_assert (GET_CODE (PATTERN (insn)) == USE
		      || GET_CODE (PATTERN (insn)) == CLOBBER
		      || GET_CODE (PATTERN (insn)) == ASM_INPUT);
	  data->insn_static_data = insn_static_data
	    = get_static_insn_data (-1, 0, 0, nalt);
	}
      else
	{
	  /* expand_asm_operands makes sure there aren't too many
	     operands.  */
	  lra_assert (nop <= MAX_RECOG_OPERANDS);
	  if (nop != 0)
	    data->operand_loc = XNEWVEC (rtx *, nop);
	  /* Now get the operand values and constraints out of the
	     insn.  */
	  decode_asm_operands (PATTERN (insn), NULL,
			       data->operand_loc,
			       constraints, operand_mode, NULL);
	  if (nop > 0)
	    {
	      /* NOTE(review): this initializer is dead -- P is
		 immediately reassigned from CONSTRAINTS[0] below.  */
	      const char *p = recog_data.constraints[0];

	      /* Count alternatives: one more than the number of commas
		 in the first operand's constraint string.  */
	      for (p = constraints[0]; *p; p++)
		nalt += *p == ',';
	    }
	  data->insn_static_data = insn_static_data
	    = get_static_insn_data (-1, nop, 0, nalt);
	  for (i = 0; i < nop; i++)
	    {
	      insn_static_data->operand[i].mode = operand_mode[i];
	      insn_static_data->operand[i].constraint = constraints[i];
	      insn_static_data->operand[i].strict_low = false;
	      insn_static_data->operand[i].is_operator = false;
	      insn_static_data->operand[i].is_address = false;
	    }
	}
      /* Derive operand types from the leading constraint character.  */
      for (i = 0; i < insn_static_data->n_operands; i++)
	insn_static_data->operand[i].type
	  = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
	     : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
	     : OP_IN);
      data->preferred_alternatives = ALL_ALTERNATIVES;
      if (nop > 0)
	{
	  operand_alternative *op_alt = XCNEWVEC (operand_alternative,
						  nalt * nop);
	  preprocess_constraints (nop, nalt, constraints, op_alt);
	  setup_operand_alternative (data, op_alt);
	}
    }
  else
    {
      /* Recognized insn: copy operand/dup locations gathered by
	 insn_extract into heap-allocated arrays owned by DATA.  */
      insn_extract (insn);
      data->insn_static_data = insn_static_data
	= get_static_insn_data (icode, insn_data[icode].n_operands,
				insn_data[icode].n_dups,
				insn_data[icode].n_alternatives);
      n = insn_static_data->n_operands;
      if (n == 0)
	locs = NULL;
      else
	{
	  locs = XNEWVEC (rtx *, n);
	  memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
	}
      data->operand_loc = locs;
      n = insn_static_data->n_dups;
      if (n == 0)
	locs = NULL;
      else
	{
	  locs = XNEWVEC (rtx *, n);
	  memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
	}
      data->dup_loc = locs;
      data->preferred_alternatives = get_preferred_alternatives (insn);
      const operand_alternative *op_alt = preprocess_insn_constraints (icode);
      if (!insn_static_data->operand_alternative)
	setup_operand_alternative (data, op_alt);
      else if (op_alt != insn_static_data->operand_alternative)
	insn_static_data->operand_alternative = op_alt;
    }
  if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
    insn_static_data->hard_regs = NULL;
  else
    insn_static_data->hard_regs
      = collect_non_operand_hard_regs (&PATTERN (insn), data,
				       NULL, OP_IN, false);
  data->arg_hard_regs = NULL;
  if (CALL_P (insn))
    {
      rtx link;
      int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];

      n_hard_regs = 0;
      /* Finding implicit hard register usage.  We believe it will be
	 not changed whatever transformations are used.  Call insns
	 are such example.  */
      for (link = CALL_INSN_FUNCTION_USAGE (insn);
	   link != NULL_RTX;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == USE
	    && REG_P (XEXP (XEXP (link, 0), 0)))
	  {
	    regno = REGNO (XEXP (XEXP (link, 0), 0));
	    lra_assert (regno < FIRST_PSEUDO_REGISTER);
	    /* It is an argument register.  Record every hard reg the
	       (possibly multi-register) value occupies.  */
	    for (i = REG_NREGS (XEXP (XEXP (link, 0), 0)) - 1; i >= 0; i--)
	      arg_hard_regs[n_hard_regs++] = regno + i;
	  }
      if (n_hard_regs != 0)
	{
	  /* The array is -1 terminated.  */
	  arg_hard_regs[n_hard_regs++] = -1;
	  data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
	  memcpy (data->arg_hard_regs, arg_hard_regs,
		  sizeof (int) * n_hard_regs);
	}
    }
  /* Some output operand can be recognized only from the context not
     from the constraints which are empty in this case.  Call insn may
     contain a hard register in set destination with empty constraint
     and extract_insn treats them as an input.  */
  for (i = 0; i < insn_static_data->n_operands; i++)
    {
      int j;
      rtx pat, set;
      struct lra_operand_data *operand = &insn_static_data->operand[i];

      /* ??? Should we treat 'X' the same way.  It looks to me that
	 'X' means anything and empty constraint means we do not
	 care.  */
      if (operand->type != OP_IN || *operand->constraint != '\0'
	  || operand->is_operator)
	continue;
      pat = PATTERN (insn);
      if (GET_CODE (pat) == SET)
	{
	  if (data->operand_loc[i] != &SET_DEST (pat))
	    continue;
	}
      else if (GET_CODE (pat) == PARALLEL)
	{
	  /* Look for a SET inside the PARALLEL whose destination is
	     this operand.  */
	  for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
	    {
	      set = XVECEXP (PATTERN (insn), 0, j);
	      if (GET_CODE (set) == SET
		  && &SET_DEST (set) == data->operand_loc[i])
		break;
	    }
	  if (j < 0)
	    continue;
	}
      else
	continue;
      operand->type = OP_OUT;
    }
  return data;
}
1130
1131/* Return info about insn give by UID. The info should be already set
1132 up. */
1133static lra_insn_recog_data_t
1134get_insn_recog_data_by_uid (int uid)
1135{
1136 lra_insn_recog_data_t data;
1137
1138 data = lra_insn_recog_data[uid];
1139 lra_assert (data != NULL);
1140 return data;
1141}
1142
1143/* Invalidate all info about insn given by its UID. */
1144static void
1145invalidate_insn_recog_data (int uid)
1146{
1147 lra_insn_recog_data_t data;
1148
1149 data = lra_insn_recog_data[uid];
1150 lra_assert (data != NULL);
1151 free_insn_recog_data (data);
1152 lra_insn_recog_data[uid] = NULL;
1153}
1154
/* Update all the insn info about INSN.  It is usually called when
   something in the insn was changed.  Return the updated info.  */
lra_insn_recog_data_t
lra_update_insn_recog_data (rtx_insn *insn)
{
  lra_insn_recog_data_t data;
  int n;
  unsigned int uid = INSN_UID (insn);
  struct lra_static_insn_data *insn_static_data;
  HOST_WIDE_INT sp_offset = 0;

  check_and_expand_insn_recog_data (uid);
  if ((data = lra_insn_recog_data[uid]) != NULL
      && data->icode != INSN_CODE (insn))
    {
      /* The insn code changed: the old info is useless.  Preserve the
	 SP offset so it can be restored on the fresh data below.  */
      sp_offset = data->sp_offset;
      invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
      invalidate_insn_recog_data (uid);
      data = NULL;
    }
  if (data == NULL)
    {
      data = lra_get_insn_recog_data (insn);
      /* Initiate or restore SP offset.  */
      data->sp_offset = sp_offset;
      return data;
    }
  /* Same insn code as before: refresh operand locations in place.  */
  insn_static_data = data->insn_static_data;
  data->used_insn_alternative = -1;
  if (DEBUG_INSN_P (insn))
    return data;
  if (data->icode < 0)
    {
      int nop;
      machine_mode operand_mode[MAX_RECOG_OPERANDS];
      const char *constraints[MAX_RECOG_OPERANDS];

      nop = asm_noperands (PATTERN (insn));
      if (nop >= 0)
	{
	  lra_assert (nop == data->insn_static_data->n_operands);
	  /* Now get the operand values and constraints out of the
	     insn.  */
	  decode_asm_operands (PATTERN (insn), NULL,
			       data->operand_loc,
			       constraints, operand_mode, NULL);
#ifdef ENABLE_CHECKING
	  {
	    int i;

	    /* Verify the static data still matches the asm.  */
	    for (i = 0; i < nop; i++)
	      lra_assert
		(insn_static_data->operand[i].mode == operand_mode[i]
		 && insn_static_data->operand[i].constraint == constraints[i]
		 && ! insn_static_data->operand[i].is_operator);
	  }
#endif
	}
#ifdef ENABLE_CHECKING
      {
	int i;

	/* Verify operand types still agree with the leading constraint
	   character.  */
	for (i = 0; i < insn_static_data->n_operands; i++)
	  lra_assert
	    (insn_static_data->operand[i].type
	     == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
		 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
		 : OP_IN));
      }
#endif
    }
  else
    {
      insn_extract (insn);
      n = insn_static_data->n_operands;
      if (n != 0)
	memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
      n = insn_static_data->n_dups;
      if (n != 0)
	memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
      lra_assert (check_bool_attrs (insn));
    }
  return data;
}
1239
1240/* Set up that INSN is using alternative ALT now. */
1241void
cfa434f6 1242lra_set_used_insn_alternative (rtx_insn *insn, int alt)
55a2c322
VM
1243{
1244 lra_insn_recog_data_t data;
1245
1246 data = lra_get_insn_recog_data (insn);
1247 data->used_insn_alternative = alt;
1248}
1249
1250/* Set up that insn with UID is using alternative ALT now. The insn
1251 info should be already set up. */
1252void
1253lra_set_used_insn_alternative_by_uid (int uid, int alt)
1254{
1255 lra_insn_recog_data_t data;
1256
1257 check_and_expand_insn_recog_data (uid);
1258 data = lra_insn_recog_data[uid];
1259 lra_assert (data != NULL);
1260 data->used_insn_alternative = alt;
1261}
1262
1263\f
1264
1265/* This page contains code dealing with common register info and
1266 pseudo copies. */
1267
1268/* The size of the following array. */
1269static int reg_info_size;
1270/* Common info about each register. */
1271struct lra_reg *lra_reg_info;
1272
1273/* Last register value. */
1274static int last_reg_value;
1275
1276/* Return new register value. */
1277static int
1278get_new_reg_value (void)
1279{
1280 return ++last_reg_value;
1281}
1282
1283/* Pools for copies. */
2f77a607 1284pool_allocator<lra_copy> lra_copy::pool ("lra copies", 100);
55a2c322 1285
55a2c322 1286/* Vec referring to pseudo copies. */
9771b263 1287static vec<lra_copy_t> copy_vec;
55a2c322
VM
1288
1289/* Initialize I-th element of lra_reg_info. */
1290static inline void
1291initialize_lra_reg_info_element (int i)
1292{
1293 bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
1294#ifdef STACK_REGS
1295 lra_reg_info[i].no_stack_p = false;
1296#endif
1297 CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
10e1bdb2 1298 CLEAR_HARD_REG_SET (lra_reg_info[i].actual_call_used_reg_set);
55a2c322
VM
1299 lra_reg_info[i].preferred_hard_regno1 = -1;
1300 lra_reg_info[i].preferred_hard_regno2 = -1;
1301 lra_reg_info[i].preferred_hard_regno_profit1 = 0;
1302 lra_reg_info[i].preferred_hard_regno_profit2 = 0;
b28ece32 1303 lra_reg_info[i].biggest_mode = VOIDmode;
55a2c322
VM
1304 lra_reg_info[i].live_ranges = NULL;
1305 lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
1306 lra_reg_info[i].last_reload = 0;
1307 lra_reg_info[i].restore_regno = -1;
1308 lra_reg_info[i].val = get_new_reg_value ();
d70a81dd 1309 lra_reg_info[i].offset = 0;
55a2c322
VM
1310 lra_reg_info[i].copies = NULL;
1311}
1312
1313/* Initialize common reg info and copies. */
1314static void
1315init_reg_info (void)
1316{
1317 int i;
1318
1319 last_reg_value = 0;
1320 reg_info_size = max_reg_num () * 3 / 2 + 1;
1321 lra_reg_info = XNEWVEC (struct lra_reg, reg_info_size);
1322 for (i = 0; i < reg_info_size; i++)
1323 initialize_lra_reg_info_element (i);
9771b263 1324 copy_vec.create (100);
55a2c322
VM
1325}
1326
1327
1328/* Finish common reg info and copies. */
1329static void
1330finish_reg_info (void)
1331{
1332 int i;
1333
1334 for (i = 0; i < reg_info_size; i++)
1335 bitmap_clear (&lra_reg_info[i].insn_bitmap);
1336 free (lra_reg_info);
1337 reg_info_size = 0;
55a2c322
VM
1338}
1339
1340/* Expand common reg info if it is necessary. */
1341static void
1342expand_reg_info (void)
1343{
1344 int i, old = reg_info_size;
1345
1346 if (reg_info_size > max_reg_num ())
1347 return;
1348 reg_info_size = max_reg_num () * 3 / 2 + 1;
1349 lra_reg_info = XRESIZEVEC (struct lra_reg, lra_reg_info, reg_info_size);
1350 for (i = old; i < reg_info_size; i++)
1351 initialize_lra_reg_info_element (i);
1352}
1353
1354/* Free all copies. */
1355void
1356lra_free_copies (void)
1357{
1358 lra_copy_t cp;
1359
9771b263 1360 while (copy_vec.length () != 0)
55a2c322 1361 {
9771b263 1362 cp = copy_vec.pop ();
55a2c322 1363 lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
2f77a607 1364 delete cp;
55a2c322
VM
1365 }
1366}
1367
1368/* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1369 frequency is FREQ. */
1370void
1371lra_create_copy (int regno1, int regno2, int freq)
1372{
1373 bool regno1_dest_p;
1374 lra_copy_t cp;
1375
1376 lra_assert (regno1 != regno2);
1377 regno1_dest_p = true;
1378 if (regno1 > regno2)
1379 {
1380 int temp = regno2;
1381
1382 regno1_dest_p = false;
1383 regno2 = regno1;
1384 regno1 = temp;
1385 }
2f77a607 1386 cp = new lra_copy ();
9771b263 1387 copy_vec.safe_push (cp);
55a2c322
VM
1388 cp->regno1_dest_p = regno1_dest_p;
1389 cp->freq = freq;
1390 cp->regno1 = regno1;
1391 cp->regno2 = regno2;
1392 cp->regno1_next = lra_reg_info[regno1].copies;
1393 lra_reg_info[regno1].copies = cp;
1394 cp->regno2_next = lra_reg_info[regno2].copies;
1395 lra_reg_info[regno2].copies = cp;
1396 if (lra_dump_file != NULL)
1397 fprintf (lra_dump_file, " Creating copy r%d%sr%d@%d\n",
1398 regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
1399}
1400
1401/* Return N-th (0, 1, ...) copy. If there is no copy, return
1402 NULL. */
1403lra_copy_t
1404lra_get_copy (int n)
1405{
9771b263 1406 if (n >= (int) copy_vec.length ())
55a2c322 1407 return NULL;
9771b263 1408 return copy_vec[n];
55a2c322
VM
1409}
1410
1411\f
1412
1413/* This page contains code dealing with info about registers in
1414 insns. */
1415
/* Process X of insn UID recursively and add info (operand type is
   given by TYPE, flag of that it is early clobber is EARLY_CLOBBER)
   about registers in X to the insn DATA.  */
static void
add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
			     enum op_type type, bool early_clobber)
{
  int i, j, regno;
  bool subreg_p;
  machine_mode mode;
  const char *fmt;
  enum rtx_code code;
  struct lra_insn_reg *curr;

  code = GET_CODE (x);
  mode = GET_MODE (x);
  subreg_p = false;
  if (GET_CODE (x) == SUBREG)
    {
      /* Look through the SUBREG; track the widest mode the register is
	 accessed in, and whether a true partial (non-natural-size)
	 subreg access occurs.  */
      x = SUBREG_REG (x);
      code = GET_CODE (x);
      if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	{
	  mode = GET_MODE (x);
	  if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
	    subreg_p = true;
	}
    }
  if (REG_P (x))
    {
      regno = REGNO (x);
      /* Process all regs even unallocatable ones as we need info about
	 all regs for rematerialization pass.  */
      expand_reg_info ();
      if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
	{
	  /* First occurrence of REGNO in this insn: make a new
	     record.  */
	  data->regs = new_insn_reg (data->insn, regno, type, mode, subreg_p,
				     early_clobber, data->regs);
	  return;
	}
      else
	{
	  /* REGNO was already seen in this insn: merge into its
	     existing record when possible.  */
	  for (curr = data->regs; curr != NULL; curr = curr->next)
	    if (curr->regno == regno)
	      {
		if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
		  /* The info can not be integrated into the found
		     structure.  */
		  data->regs = new_insn_reg (data->insn, regno, type, mode,
					     subreg_p, early_clobber,
					     data->regs);
		else
		  {
		    /* Both input and output uses combine to INOUT;
		       early clobber is sticky.  */
		    if (curr->type != type)
		      curr->type = OP_INOUT;
		    if (curr->early_clobber != early_clobber)
		      curr->early_clobber = true;
		  }
		return;
	      }
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case SET:
      add_regs_to_insn_regno_info (data, SET_DEST (x), uid, OP_OUT, false);
      add_regs_to_insn_regno_info (data, SET_SRC (x), uid, OP_IN, false);
      break;
    case CLOBBER:
      /* We treat clobber of non-operand hard registers as early
	 clobber (the behavior is expected from asm).  */
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_OUT, true);
      break;
    case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
      break;
    case PRE_MODIFY: case POST_MODIFY:
      add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
      add_regs_to_insn_regno_info (data, XEXP (x, 1), uid, OP_IN, false);
      break;
    default:
      if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
	/* Some targets place small structures in registers for return
	   values of functions, and those registers are wrapped in
	   PARALLEL that we may see as the destination of a SET.  Here
	   is an example:

	   (call_insn 13 12 14 2 (set (parallel:BLK [
		(expr_list:REG_DEP_TRUE (reg:DI 0 ax)
		    (const_int 0 [0]))
		(expr_list:REG_DEP_TRUE (reg:DI 1 dx)
		    (const_int 8 [0x8]))
	       ])
	     (call (mem:QI (symbol_ref:DI (...  */
	type = OP_IN;
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    add_regs_to_insn_regno_info (data, XEXP (x, i), uid, type, false);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), uid,
					     type, false);
	    }
	}
    }
}
1527
1528/* Return execution frequency of INSN. */
1529static int
cfa434f6 1530get_insn_freq (rtx_insn *insn)
55a2c322 1531{
39718607 1532 basic_block bb = BLOCK_FOR_INSN (insn);
55a2c322 1533
39718607
SB
1534 gcc_checking_assert (bb != NULL);
1535 return REG_FREQ_FROM_BB (bb);
55a2c322
VM
1536}
1537
1538/* Invalidate all reg info of INSN with DATA and execution frequency
1539 FREQ. Update common info about the invalidated registers. */
1540static void
cfa434f6 1541invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx_insn *insn,
55a2c322
VM
1542 int freq)
1543{
1544 int uid;
1545 bool debug_p;
1546 unsigned int i;
1547 struct lra_insn_reg *ir, *next_ir;
1548
1549 uid = INSN_UID (insn);
1550 debug_p = DEBUG_INSN_P (insn);
1551 for (ir = data->regs; ir != NULL; ir = next_ir)
1552 {
1553 i = ir->regno;
1554 next_ir = ir->next;
2f77a607 1555 delete ir;
55a2c322
VM
1556 bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
1557 if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
1558 {
1559 lra_reg_info[i].nrefs--;
1560 lra_reg_info[i].freq -= freq;
1561 lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
1562 }
1563 }
1564 data->regs = NULL;
1565}
1566
1567/* Invalidate all reg info of INSN. Update common info about the
1568 invalidated registers. */
1569void
cfa434f6 1570lra_invalidate_insn_regno_info (rtx_insn *insn)
55a2c322
VM
1571{
1572 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1573 get_insn_freq (insn));
1574}
1575
1576/* Update common reg info from reg info of insn given by its DATA and
1577 execution frequency FREQ. */
1578static void
1579setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1580{
1581 unsigned int i;
1582 struct lra_insn_reg *ir;
1583
1584 for (ir = data->regs; ir != NULL; ir = ir->next)
1585 if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1586 {
1587 lra_reg_info[i].nrefs++;
1588 lra_reg_info[i].freq += freq;
1589 }
1590}
1591
/* Set up insn reg info of INSN.  Update common reg info from reg info
   of INSN.  */
void
lra_update_insn_regno_info (rtx_insn *insn)
{
  int i, uid, freq;
  lra_insn_recog_data_t data;
  struct lra_static_insn_data *static_data;
  enum rtx_code code;
  rtx link;

  if (! INSN_P (insn))
    return;
  data = lra_get_insn_recog_data (insn);
  static_data = data->insn_static_data;
  freq = get_insn_freq (insn);
  /* Drop stale per-reg info before recollecting it below.  */
  invalidate_insn_data_regno_info (data, insn, freq);
  uid = INSN_UID (insn);
  for (i = static_data->n_operands - 1; i >= 0; i--)
    add_regs_to_insn_regno_info (data, *data->operand_loc[i], uid,
				 static_data->operand[i].type,
				 static_data->operand[i].early_clobber);
  if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
    add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), uid,
				 code == USE ? OP_IN : OP_OUT, false);
  if (CALL_P (insn))
    /* On some targets call insns can refer to pseudos in memory in
       CALL_INSN_FUNCTION_USAGE list.  Process them in order to
       consider their occurrences in calls for different
       transformations (e.g. inheritance) with given pseudos.  */
    for (link = CALL_INSN_FUNCTION_USAGE (insn);
	 link != NULL_RTX;
	 link = XEXP (link, 1))
      if (((code = GET_CODE (XEXP (link, 0))) == USE || code == CLOBBER)
	  && MEM_P (XEXP (XEXP (link, 0), 0)))
	add_regs_to_insn_regno_info (data, XEXP (XEXP (link, 0), 0), uid,
				     code == USE ? OP_IN : OP_OUT, false);
  /* Debug insns do not contribute to nrefs/freq counters.  */
  if (NONDEBUG_INSN_P (insn))
    setup_insn_reg_info (data, freq);
}
1632
1633/* Return reg info of insn given by it UID. */
1634struct lra_insn_reg *
1635lra_get_insn_regs (int uid)
1636{
1637 lra_insn_recog_data_t data;
1638
1639 data = get_insn_recog_data_by_uid (uid);
1640 return data->regs;
1641}
1642
1643\f
1644
1645/* This page contains code dealing with stack of the insns which
1646 should be processed by the next constraint pass. */
1647
1648/* Bitmap used to put an insn on the stack only in one exemplar. */
1649static sbitmap lra_constraint_insn_stack_bitmap;
1650
1651/* The stack itself. */
cfa434f6 1652vec<rtx_insn *> lra_constraint_insn_stack;
55a2c322
VM
1653
/* Put INSN on the stack.  If ALWAYS_UPDATE is true, always update the reg
   info for INSN, otherwise only update it if INSN is not already on the
   stack.  */
static inline void
lra_push_insn_1 (rtx_insn *insn, bool always_update)
{
  unsigned int uid = INSN_UID (insn);
  if (always_update)
    lra_update_insn_regno_info (insn);
  /* Grow the membership bitmap lazily as insn UIDs grow.  */
  if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
    lra_constraint_insn_stack_bitmap =
      sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
  /* The bitmap guarantees every insn appears on the stack at most
     once.  */
  if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
    return;
  bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
  if (! always_update)
    lra_update_insn_regno_info (insn);
  lra_constraint_insn_stack.safe_push (insn);
}
1673
1674/* Put INSN on the stack. */
1675void
cfa434f6 1676lra_push_insn (rtx_insn *insn)
55a2c322
VM
1677{
1678 lra_push_insn_1 (insn, false);
1679}
1680
1681/* Put INSN on the stack and update its reg info. */
1682void
cfa434f6 1683lra_push_insn_and_update_insn_regno_info (rtx_insn *insn)
55a2c322
VM
1684{
1685 lra_push_insn_1 (insn, true);
1686}
1687
1688/* Put insn with UID on the stack. */
1689void
1690lra_push_insn_by_uid (unsigned int uid)
1691{
1692 lra_push_insn (lra_insn_recog_data[uid]->insn);
1693}
1694
1695/* Take the last-inserted insns off the stack and return it. */
cfa434f6 1696rtx_insn *
55a2c322
VM
1697lra_pop_insn (void)
1698{
cfa434f6 1699 rtx_insn *insn = lra_constraint_insn_stack.pop ();
d7c028c0 1700 bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
55a2c322
VM
1701 return insn;
1702}
1703
1704/* Return the current size of the insn stack. */
1705unsigned int
1706lra_insn_stack_length (void)
1707{
9771b263 1708 return lra_constraint_insn_stack.length ();
55a2c322
VM
1709}
1710
1711/* Push insns FROM to TO (excluding it) going in reverse order. */
1712static void
cfa434f6 1713push_insns (rtx_insn *from, rtx_insn *to)
55a2c322 1714{
cfa434f6 1715 rtx_insn *insn;
55a2c322
VM
1716
1717 if (from == NULL_RTX)
1718 return;
1719 for (insn = from; insn != to; insn = PREV_INSN (insn))
1720 if (INSN_P (insn))
1721 lra_push_insn (insn);
1722}
1723
8d49e7ef
VM
/* Set up sp offset for insns in range [FROM, LAST].  The offset is
   taken from the next BB insn after LAST, or zero if there is no such
   insn.  */
static void
setup_sp_offset (rtx_insn *from, rtx_insn *last)
{
  /* The insn that will follow the new sequence carries the SP offset
     the new insns should inherit.  */
  rtx_insn *before = next_nonnote_insn_bb (last);
  HOST_WIDE_INT offset = (before == NULL_RTX || ! INSN_P (before)
			  ? 0 : lra_get_insn_recog_data (before)->sp_offset);

  for (rtx_insn *insn = from; insn != NEXT_INSN (last); insn = NEXT_INSN (insn))
    lra_get_insn_recog_data (insn)->sp_offset = offset;
}
1737
55a2c322
VM
/* Emit insns BEFORE before INSN and insns AFTER after INSN.  Put the
   insns onto the stack.  Print about emitting the insns with
   TITLE.  */
void
lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
		       const char *title)
{
  rtx_insn *last;

  if (before == NULL_RTX && after == NULL_RTX)
    return;
  if (lra_dump_file != NULL)
    {
      dump_insn_slim (lra_dump_file, insn);
      if (before != NULL_RTX)
	{
	  fprintf (lra_dump_file,"    %s before:\n", title);
	  dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
	}
      if (after != NULL_RTX)
	{
	  fprintf (lra_dump_file, "    %s after:\n", title);
	  dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
	}
      fprintf (lra_dump_file, "\n");
    }
  if (before != NULL_RTX)
    {
      emit_insn_before (before, insn);
      /* Push the freshly emitted insns (now the range just before
	 INSN) and give them the inherited SP offset.  */
      push_insns (PREV_INSN (insn), PREV_INSN (before));
      setup_sp_offset (before, PREV_INSN (insn));
    }
  if (after != NULL_RTX)
    {
      /* Find the tail of the AFTER sequence before emitting it.  */
      for (last = after; NEXT_INSN (last) != NULL_RTX; last = NEXT_INSN (last))
	;
      emit_insn_after (after, insn);
      push_insns (last, insn);
      setup_sp_offset (after, last);
    }
}
1779
1780\f
1781
8160cd3e
VM
/* Replace all references to register OLD_REGNO in *LOC with pseudo
   register NEW_REG.  Return true if any change was made.  */
bool
lra_substitute_pseudo (rtx *loc, int old_regno, rtx new_reg)
{
  rtx x = *loc;
  bool result = false;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (x == NULL_RTX)
    return false;

  code = GET_CODE (x);
  if (code == REG && (int) REGNO (x) == old_regno)
    {
      machine_mode mode = GET_MODE (*loc);
      machine_mode inner_mode = GET_MODE (new_reg);

      /* Modes differ: wrap NEW_REG so the replacement keeps the
	 original mode.  A CONST_INT replacing a scalar-int reg needs
	 no wrapping.  */
      if (mode != inner_mode
	  && ! (CONST_INT_P (new_reg) && SCALAR_INT_MODE_P (mode)))
	{
	  if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (inner_mode)
	      || ! SCALAR_INT_MODE_P (inner_mode))
	    new_reg = gen_rtx_SUBREG (mode, new_reg, 0);
	  else
	    new_reg = gen_lowpart_SUBREG (mode, new_reg);
	}
      *loc = new_reg;
      return true;
    }

  /* Scan all the operand sub-expressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (lra_substitute_pseudo (&XEXP (x, i), old_regno, new_reg))
	    result = true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (lra_substitute_pseudo (&XVECEXP (x, i, j), old_regno, new_reg))
	      result = true;
	}
    }
  return result;
}
1833
1834/* Call lra_substitute_pseudo within an insn. This won't update the insn ptr,
1835 just the contents of the insn. */
1836bool
1837lra_substitute_pseudo_within_insn (rtx_insn *insn, int old_regno, rtx new_reg)
1838{
1839 rtx loc = insn;
1840 return lra_substitute_pseudo (&loc, old_regno, new_reg);
1841}
1842
1843\f
1844
55a2c322
VM
1845/* This page contains code dealing with scratches (changing them onto
1846 pseudos and restoring them from the pseudos).
1847
1848 We change scratches into pseudos at the beginning of LRA to
1849 simplify dealing with them (conflicts, hard register assignments).
1850
1851 If the pseudo denoting scratch was spilled it means that we do need
1852 a hard register for it. Such pseudos are transformed back to
1853 scratches at the end of LRA. */
1854
/* Description of location of a former scratch operand, recorded so
   the scratch can be restored at the end of LRA.  */
struct sloc
{
  rtx_insn *insn; /* Insn where the scratch was.  */
  int nop;  /* Number of the operand which was a scratch.  */
};

/* Pointer to a former-scratch location record.  */
typedef struct sloc *sloc_t;
55a2c322 1863
55a2c322 1864/* Locations of the former scratches. */
9771b263 1865static vec<sloc_t> scratches;
55a2c322
VM
1866
1867/* Bitmap of scratch regnos. */
1868static bitmap_head scratch_bitmap;
1869
1870/* Bitmap of scratch operands. */
1871static bitmap_head scratch_operand_bitmap;
1872
1873/* Return true if pseudo REGNO is made of SCRATCH. */
1874bool
1875lra_former_scratch_p (int regno)
1876{
1877 return bitmap_bit_p (&scratch_bitmap, regno);
1878}
1879
1880/* Return true if the operand NOP of INSN is a former scratch. */
1881bool
cfa434f6 1882lra_former_scratch_operand_p (rtx_insn *insn, int nop)
55a2c322
VM
1883{
1884 return bitmap_bit_p (&scratch_operand_bitmap,
1885 INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
1886}
1887
892927b7
VM
1888/* Register operand NOP in INSN as a former scratch. It will be
1889 changed to scratch back, if it is necessary, at the LRA end. */
1890void
1891lra_register_new_scratch_op (rtx_insn *insn, int nop)
1892{
1893 lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
1894 rtx op = *id->operand_loc[nop];
1895 sloc_t loc = XNEW (struct sloc);
1896 lra_assert (REG_P (op));
1897 loc->insn = insn;
1898 loc->nop = nop;
1899 scratches.safe_push (loc);
1900 bitmap_set_bit (&scratch_bitmap, REGNO (op));
1901 bitmap_set_bit (&scratch_operand_bitmap,
1902 INSN_UID (insn) * MAX_RECOG_OPERANDS + nop);
1903 add_reg_note (insn, REG_UNUSED, op);
1904}
1905
55a2c322
VM
/* Change scratches onto pseudos and save their location.  */
static void
remove_scratches (void)
{
  int i;
  bool insn_changed_p;
  basic_block bb;
  rtx_insn *insn;
  rtx reg;
  lra_insn_recog_data_t id;
  struct lra_static_insn_data *static_id;

  scratches.create (get_max_uid ());
  bitmap_initialize (&scratch_bitmap, &reg_obstack);
  bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	id = lra_get_insn_recog_data (insn);
	static_id = id->insn_static_data;
	insn_changed_p = false;
	for (i = 0; i < static_id->n_operands; i++)
	  /* VOIDmode scratches are left alone.  */
	  if (GET_CODE (*id->operand_loc[i]) == SCRATCH
	      && GET_MODE (*id->operand_loc[i]) != VOIDmode)
	    {
	      insn_changed_p = true;
	      /* Replace the SCRATCH with a fresh pseudo and remember
		 where it was.  */
	      *id->operand_loc[i] = reg
		= lra_create_new_reg (static_id->operand[i].mode,
				      *id->operand_loc[i], ALL_REGS, NULL);
	      lra_register_new_scratch_op (insn, i);
	      if (lra_dump_file != NULL)
		fprintf (lra_dump_file,
			 "Removing SCRATCH in insn #%u (nop %d)\n",
			 INSN_UID (insn), i);
	    }
	if (insn_changed_p)
	  /* Because we might use DF right after caller-saves sub-pass
	     we need to keep DF info up to date.  */
	  df_insn_rescan (insn);
      }
}
1948
/* Changes pseudos created by function remove_scratches onto scratches.  */
static void
restore_scratches (void)
{
  int regno;
  unsigned i;
  sloc_t loc;
  rtx_insn *last = NULL;
  lra_insn_recog_data_t id = NULL;

  for (i = 0; scratches.iterate (i, &loc); i++)
    {
      /* Cache the recog data while consecutive records refer to the
	 same insn.  */
      if (last != loc->insn)
	{
	  last = loc->insn;
	  id = lra_get_insn_recog_data (last);
	}
      if (REG_P (*id->operand_loc[loc->nop])
	  && ((regno = REGNO (*id->operand_loc[loc->nop]))
	      >= FIRST_PSEUDO_REGISTER)
	  && lra_get_regno_hard_regno (regno) < 0)
	{
	  /* It should be only case when scratch register with chosen
	     constraint 'X' did not get memory or hard register.  */
	  lra_assert (lra_former_scratch_p (regno));
	  *id->operand_loc[loc->nop]
	    = gen_rtx_SCRATCH (GET_MODE (*id->operand_loc[loc->nop]));
	  lra_update_dup (id, loc->nop);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Restoring SCRATCH in insn #%u(nop %d)\n",
		     INSN_UID (loc->insn), loc->nop);
	}
    }
  for (i = 0; scratches.iterate (i, &loc); i++)
    free (loc);
  scratches.release ();
  bitmap_clear (&scratch_bitmap);
  bitmap_clear (&scratch_operand_bitmap);
}
1988
1989\f
1990
#ifdef ENABLE_CHECKING

/* Function checks RTL for correctness.	 If FINAL_P is true, it is
   done at the end of LRA and the check is more rigorous.  */
static void
check_rtl (bool final_p)
{
  basic_block bb;
  rtx_insn *insn;

  lra_assert (! final_p || reload_completed);
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (NONDEBUG_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) != USE
	  && GET_CODE (PATTERN (insn)) != CLOBBER
	  && GET_CODE (PATTERN (insn)) != ASM_INPUT)
	{
	  if (final_p)
	    {
	      /* This whole function is compiled only under
		 ENABLE_CHECKING, so call this unconditionally.  The
		 previous guard "#ifdef ENABLED_CHECKING" misspelled
		 the macro and silently disabled the constraint
		 check.  */
	      extract_constrain_insn (insn);
	      continue;
	    }
	  /* LRA code is based on assumption that all addresses can be
	     correctly decomposed.  LRA can generate reloads for
	     decomposable addresses.  The decomposition code checks the
	     correctness of the addresses.  So we don't need to check
	     the addresses here.  Don't call insn_invalid_p here, it can
	     change the code at this stage.  */
	  if (recog_memoized (insn) < 0 && asm_noperands (PATTERN (insn)) < 0)
	    fatal_insn_not_found (insn);
	}
}
#endif /* #ifdef ENABLE_CHECKING */
2027
2028/* Determine if the current function has an exception receiver block
2029 that reaches the exit block via non-exceptional edges */
2030static bool
2031has_nonexceptional_receiver (void)
2032{
2033 edge e;
2034 edge_iterator ei;
2035 basic_block *tos, *worklist, bb;
2036
2037 /* If we're not optimizing, then just err on the safe side. */
2038 if (!optimize)
2039 return true;
f4eafc30 2040
55a2c322 2041 /* First determine which blocks can reach exit via normal paths. */
0cae8d31 2042 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
55a2c322 2043
11cd3bed 2044 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
2045 bb->flags &= ~BB_REACHABLE;
2046
2047 /* Place the exit block on our worklist. */
fefa31b5
DM
2048 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
2049 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
f4eafc30 2050
55a2c322
VM
2051 /* Iterate: find everything reachable from what we've already seen. */
2052 while (tos != worklist)
2053 {
2054 bb = *--tos;
2055
2056 FOR_EACH_EDGE (e, ei, bb->preds)
2057 if (e->flags & EDGE_ABNORMAL)
2058 {
2059 free (worklist);
2060 return true;
2061 }
2062 else
2063 {
2064 basic_block src = e->src;
2065
2066 if (!(src->flags & BB_REACHABLE))
2067 {
2068 src->flags |= BB_REACHABLE;
2069 *tos++ = src;
2070 }
2071 }
2072 }
2073 free (worklist);
2074 /* No exceptional block reached exit unexceptionally. */
2075 return false;
2076}
2077
#ifdef AUTO_INC_DEC

/* Walk X (part of INSN) recursively, attaching a REG_INC note to INSN
   for every register modified by an auto-increment address.  */
static void
add_auto_inc_notes (rtx_insn *insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);

  /* A MEM whose address auto-modifies a register: note that register
     and stop -- there is nothing further to record inside it.  */
  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
      return;
    }

  /* Otherwise recurse into every sub-expression of X.  */
  const char *fmt = GET_RTX_FORMAT (code);
  for (int i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_auto_inc_notes (insn, XEXP (x, i));
      else if (fmt[i] == 'E')
	for (int j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_auto_inc_notes (insn, XVECEXP (x, i, j));
    }
}

#endif
2107
/* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
   We change pseudos by hard registers without notification of DF and
   that can make the notes obsolete.  DF-infrastructure does not deal
   with REG_INC notes -- so we should regenerate them here.  */
static void
update_inc_notes (void)
{
  rtx *pnote;
  basic_block bb;
  rtx_insn *insn;

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (NONDEBUG_INSN_P (insn))
	{
	  pnote = &REG_NOTES (insn);
	  while (*pnote != 0)
	    {
	      if (REG_NOTE_KIND (*pnote) == REG_DEAD
		  || REG_NOTE_KIND (*pnote) == REG_UNUSED
		  || REG_NOTE_KIND (*pnote) == REG_INC)
		/* Unlink this note: splice the rest of the list into
		   the slot PNOTE points at; do not advance PNOTE.  */
		*pnote = XEXP (*pnote, 1);
	      else
		/* Keep this note and advance to the next link.  */
		pnote = &XEXP (*pnote, 1);
	    }
#ifdef AUTO_INC_DEC
	  /* Rebuild REG_INC notes from the (possibly rewritten)
	     pattern.  */
	  add_auto_inc_notes (insn, PATTERN (insn));
#endif
	}
}
2138
/* Set to 1 while in lra.  */
int lra_in_progress;

/* Start of pseudo regnos before the LRA.  */
int lra_new_regno_start;

/* Start of reload pseudo regnos before the new spill pass.  */
int lra_constraint_new_regno_start;

/* Avoid spilling pseudos with regno more than the following value if
   it is possible.  */
int lra_bad_spill_regno_start;

/* Inheritance pseudo regnos before the new spill pass.  */
bitmap_head lra_inheritance_pseudos;

/* Split regnos before the new spill pass.  */
bitmap_head lra_split_regs;

/* Reload pseudo regnos before the new assignment pass which still can
   be spilled after the assignment pass as memory is also accepted in
   insns for the reload pseudos.  */
bitmap_head lra_optional_reload_pseudos;

/* Pseudo regnos used for subreg reloads before the new assignment
   pass.  Such pseudos still can be spilled after the assignment
   pass.  */
bitmap_head lra_subreg_reload_pseudos;

/* File used for output of LRA debug information.  */
FILE *lra_dump_file;

/* True if we should try spill into registers of different classes
   instead of memory.  */
bool lra_reg_spill_p;
2174
2175/* Set up value LRA_REG_SPILL_P. */
2176static void
2177setup_reg_spill_flag (void)
2178{
2179 int cl, mode;
2180
2181 if (targetm.spill_class != NULL)
2182 for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2183 for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2184 if (targetm.spill_class ((enum reg_class) cl,
ef4bddc2 2185 (machine_mode) mode) != NO_REGS)
55a2c322
VM
2186 {
2187 lra_reg_spill_p = true;
2188 return;
2189 }
2190 lra_reg_spill_p = false;
2191}
2192
/* True if the current function is too big to use regular algorithms
   in LRA.  In other words, we should use simpler and faster algorithms
   in LRA.  It also means we should not worry about generating code
   for caller saves.  The value is set up in IRA.  */
bool lra_simple_p;
2198
/* Major LRA entry function.  F is the file to be used for dumping LRA
   debug info (NULL for no dumping).  Drives the whole pass: scratch
   removal, the constraint/assignment iteration, spilling and
   rematerialization, elimination, and final clean-up.  */
void
lra (FILE *f)
{
  int i;
  bool live_p, scratch_p, inserted_p;

  lra_dump_file = f;

  timevar_push (TV_LRA);

  /* Make sure that the last insn is a note.  Some subsequent passes
     need it.  */
  emit_note (NOTE_INSN_DELETED);

  COPY_HARD_REG_SET (lra_no_alloc_regs, ira_no_alloc_regs);

  init_reg_info ();
  expand_reg_info ();

  init_insn_recog_data ();

#ifdef ENABLE_CHECKING
  /* Some quick check on RTL generated by previous passes.  */
  check_rtl (false);
#endif

  lra_in_progress = 1;

  /* Reset all per-invocation iteration counters.  */
  lra_live_range_iter = lra_coalesce_iter = lra_constraint_iter = 0;
  lra_assignment_iter = lra_assignment_iter_after_spill = 0;
  lra_inheritance_iter = lra_undo_inheritance_iter = 0;
  lra_rematerialization_iter = 0;

  setup_reg_spill_flag ();

  /* Function remove_scratches can create new pseudos for clobbers --
     so set up lra_constraint_new_regno_start before its call to
     permit changing reg classes for pseudos created by this
     simplification.  */
  lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
  lra_bad_spill_regno_start = INT_MAX;
  remove_scratches ();
  scratch_p = lra_constraint_new_regno_start != max_reg_num ();

  /* A function that has a non-local label that can reach the exit
     block via non-exceptional paths must save all call-saved
     registers.  */
  if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
    crtl->saves_all_registers = 1;

  if (crtl->saves_all_registers)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
	df_set_regs_ever_live (i, true);

  /* We don't use DF from now on, and avoid using it, because it is
     too expensive when a lot of RTL changes are made.  */
  df_set_flags (DF_NO_INSN_RESCAN);
  lra_constraint_insn_stack.create (get_max_uid ());
  lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
  bitmap_clear (lra_constraint_insn_stack_bitmap);
  lra_live_ranges_init ();
  lra_constraints_init ();
  lra_curr_reload_num = 0;
  /* Push all insns so the first constraint pass processes everything;
     it is needed for the 1st coalescing.  */
  push_insns (get_last_insn (), NULL);
  bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
  bitmap_initialize (&lra_split_regs, &reg_obstack);
  bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
  bitmap_initialize (&lra_subreg_reload_pseudos, &reg_obstack);
  live_p = false;
  if (get_frame_size () != 0 && crtl->stack_alignment_needed)
    /* If we have a stack frame, we must align it now.  The stack size
       may be a part of the offset computation for register
       elimination.  */
    assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
  lra_init_equiv ();
  /* Outer loop: constraints/assignment until no spills are needed.  */
  for (;;)
    {
      /* Inner loop: iterate constraints and assignment to a fixed
	 point.  */
      for (;;)
	{
	  /* We should try to assign hard registers to scratches even
	     if there were no RTL transformations in
	     lra_constraints.  */
	  if (! lra_constraints (lra_constraint_iter == 0)
	      && (lra_constraint_iter > 1
		  || (! scratch_p && ! caller_save_needed)))
	    break;
	  /* Constraint transformations may result in that eliminable
	     hard regs become uneliminable and pseudos which use them
	     should be spilled.	 It is better to do it before pseudo
	     assignments.

	     For example, rs6000 can make
	     RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
	     to use a constant pool.  */
	  lra_eliminate (false, false);
	  /* Do inheritance only for regular algorithms.  */
	  if (! lra_simple_p)
	    {
	      if (flag_ipa_ra)
		{
		  if (live_p)
		    lra_clear_live_ranges ();
		  /* As a side-effect of lra_create_live_ranges, we
		     calculate actual_call_used_reg_set, which is
		     needed during lra_inheritance.  */
		  lra_create_live_ranges (true, true);
		  live_p = true;
		}
	      lra_inheritance ();
	    }
	  if (live_p)
	    lra_clear_live_ranges ();
	  /* We need live ranges for lra_assign -- so build them.  But
	     don't remove dead insns or change global live info as we
	     can undo inheritance transformations after inheritance
	     pseudo assigning.  */
	  lra_create_live_ranges (true, false);
	  live_p = true;
	  /* If we don't spill non-reload and non-inheritance pseudos,
	     there is no sense to run memory-memory move coalescing.
	     If inheritance pseudos were spilled, the memory-memory
	     moves involving them will be removed by pass undoing
	     inheritance.  */
	  if (lra_simple_p)
	    lra_assign ();
	  else
	    {
	      bool spill_p = !lra_assign ();

	      if (lra_undo_inheritance ())
		live_p = false;
	      if (spill_p)
		{
		  if (! live_p)
		    {
		      lra_create_live_ranges (true, true);
		      live_p = true;
		    }
		  if (lra_coalesce ())
		    live_p = false;
		}
	      if (! live_p)
		lra_clear_live_ranges ();
	    }
	}
      /* Don't clear optional reloads bitmap until all constraints are
	 satisfied as we need to differ them from regular reloads.  */
      bitmap_clear (&lra_optional_reload_pseudos);
      bitmap_clear (&lra_subreg_reload_pseudos);
      bitmap_clear (&lra_inheritance_pseudos);
      bitmap_clear (&lra_split_regs);
      if (! live_p)
	{
	  /* We need full live info for spilling pseudos into
	     registers instead of memory.  */
	  lra_create_live_ranges (lra_reg_spill_p, true);
	  live_p = true;
	}
      /* We should check necessity for spilling here as the above live
	 range pass can remove spilled pseudos.  */
      if (! lra_need_for_spills_p ())
	break;
      /* Now we know what pseudos should be spilled.  Try to
	 rematerialize them first.  */
      if (lra_remat ())
	{
	  /* We need full live info -- see the comment above.  */
	  lra_create_live_ranges (lra_reg_spill_p, true);
	  live_p = true;
	  if (! lra_need_for_spills_p ())
	    break;
	}
      lra_spill ();
      /* Assignment of stack slots changes elimination offsets for
	 some eliminations.  So update the offsets here.  */
      lra_eliminate (false, false);
      lra_constraint_new_regno_start = max_reg_num ();
      if (lra_bad_spill_regno_start == INT_MAX
	  && lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES
	  && lra_rematerialization_iter > LRA_MAX_REMATERIALIZATION_PASSES)
	/* After switching off inheritance and rematerialization
	   passes, avoid spilling reload pseudos that will be created,
	   to prevent LRA cycling in some complicated cases.  */
	lra_bad_spill_regno_start = lra_constraint_new_regno_start;
      lra_assignment_iter_after_spill = 0;
    }
  restore_scratches ();
  lra_eliminate (true, false);
  lra_final_code_change ();
  lra_in_progress = 0;
  if (live_p)
    lra_clear_live_ranges ();
  lra_live_ranges_finish ();
  lra_constraints_finish ();
  finish_reg_info ();
  sbitmap_free (lra_constraint_insn_stack_bitmap);
  lra_constraint_insn_stack.release ();
  finish_insn_recog_data ();
  regstat_free_n_sets_and_refs ();
  regstat_free_ri ();
  reload_completed = 1;
  update_inc_notes ();

  inserted_p = fixup_abnormal_edges ();

  /* We've possibly turned single trapping insn into multiple ones.  */
  if (cfun->can_throw_non_call_exceptions)
    {
      sbitmap blocks;
      blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
      bitmap_ones (blocks);
      find_many_sub_basic_blocks (blocks);
      sbitmap_free (blocks);
    }

  if (inserted_p)
    commit_edge_insertions ();

  /* Replacing pseudos with their memory equivalents might have
     created shared rtx.  Subsequent passes would get confused
     by this, so unshare everything here.  */
  unshare_all_rtl_again (get_insns ());

#ifdef ENABLE_CHECKING
  check_rtl (true);
#endif

  timevar_pop (TV_LRA);
}
2432
/* Called once per compiler run to initialize LRA data that is set up
   exactly once (per-insn-code operand data).  */
void
lra_init_once (void)
{
  init_insn_code_data_once ();
}
2439
55a2c322
VM
/* Called once per compiler run to free the LRA data that
   lra_init_once set up.  */
void
lra_finish_once (void)
{
  finish_insn_code_data_once ();
}