c6a6cdaa 1/* LRA (local register allocator) driver and LRA utilities.
2 Copyright (C) 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22
 23/* The Local Register Allocator (LRA) is a replacement for the former
 24 reload pass.  It aims to simplify the code that solves the reload
 25 pass tasks, to make the code easier to maintain, and to implement
 26 new promising optimizations.
 27
 28 The major LRA design decisions are:
 29 o division into small, manageable, separate sub-tasks;
 30 o reflection of all transformations and decisions in RTL as much
 31 as possible;
 32 o insn constraints as the primary source of information (minimizing
 33 the number of target-dependent macros/hooks).
 34
 35 In brief, LRA works by iteratively processing insns, with the final
 36 goal of satisfying all insn and address constraints:
 37 o New reload insns (in brief, reloads) and reload pseudos might be
 38 generated;
 39 o Some pseudos might be spilled to assign hard registers to
 40 new reload pseudos;
 41 o Spilled pseudos are changed to stack memory or their equivalences;
 42 o Allocating stack memory changes address displacements, so a
 43 new iteration is needed.
 44
 45 Here is a block diagram of the LRA passes:
46
1a8f8886 47 ---------------------
48 | Undo inheritance | --------------- ---------------
c6a6cdaa 49 | for spilled pseudos)| | Memory-memory | | New (and old) |
50 | and splits (for |<----| move coalesce |<-----| pseudos |
51 | pseudos got the | --------------- | assignment |
1a8f8886 52 Start | same hard regs) | ---------------
c6a6cdaa 53 | --------------------- ^
54 V | ---------------- |
55 ----------- V | Update virtual | |
56| Remove |----> ------------>| register | |
57| scratches | ^ | displacements | |
58 ----------- | ---------------- |
59 | | |
60 | V New |
61 ---------------- No ------------ pseudos -------------------
62 | Spilled pseudo | change |Constraints:| or insns | Inheritance/split |
63 | to memory |<-------| RTL |--------->| transformations |
64 | substitution | | transfor- | | in EBB scope |
65 ---------------- | mations | -------------------
1a8f8886 66 | ------------
c6a6cdaa 67 V
68 -------------------------
69 | Hard regs substitution, |
 70 | devirtualization, and |------> Finish
71 | restoring scratches got |
72 | memory |
73 -------------------------
74
75 To speed up the process:
76 o We process only insns affected by changes on previous
77 iterations;
 78 o We don't use DFA-infrastructure because it results in much slower
 79 compilation than the special IR described below does;
80 o We use a special insn representation for quick access to insn
81 info which is always *synchronized* with the current RTL;
 82 o The insn IR is minimized in memory usage. It is divided into three parts:
83 o one specific for each insn in RTL (only operand locations);
84 o one common for all insns in RTL with the same insn code
85 (different operand attributes from machine descriptions);
86 o one oriented for maintenance of live info (list of pseudos).
87 o Pseudo data:
88 o all insns where the pseudo is referenced;
89 o live info (conflicting hard regs, live ranges, # of
90 references etc);
91 o data used for assigning (preferred hard regs, costs etc).
92
 93 This file contains the LRA driver, LRA utility functions and data, and
94 code for dealing with scratches. */
95
96#include "config.h"
97#include "system.h"
98#include "coretypes.h"
99#include "tm.h"
100#include "rtl.h"
101#include "tm_p.h"
102#include "regs.h"
103#include "insn-config.h"
104#include "insn-codes.h"
105#include "recog.h"
106#include "output.h"
107#include "addresses.h"
108#include "hard-reg-set.h"
109#include "flags.h"
110#include "function.h"
111#include "expr.h"
112#include "basic-block.h"
113#include "except.h"
114#include "tree-pass.h"
115#include "timevar.h"
116#include "target.h"
117#include "vec.h"
118#include "ira.h"
119#include "lra-int.h"
120#include "df.h"
121
 122/* Hard registers currently not available for allocation. It can
 123 change after some hard registers become non-eliminable. */
124HARD_REG_SET lra_no_alloc_regs;
125
126static int get_new_reg_value (void);
127static void expand_reg_info (void);
128static void invalidate_insn_recog_data (int);
129static int get_insn_freq (rtx);
130static void invalidate_insn_data_regno_info (lra_insn_recog_data_t, rtx, int);
131
132/* Expand all regno related info needed for LRA. */
133static void
134expand_reg_data (void)
135{
136 resize_reg_info ();
137 expand_reg_info ();
138 ira_expand_reg_equiv ();
139}
140
141/* Create and return a new reg of ORIGINAL mode. If ORIGINAL is NULL
142 or of VOIDmode, use MD_MODE for the new reg. Initialize its
 143 register class to RCLASS. Print a message about assigning class
 144 RCLASS to the new register named TITLE unless TITLE is NULL. Use
 145 the attributes of ORIGINAL if it is a register. The created
 146 register will hold a unique value. */
147rtx
148lra_create_new_reg_with_unique_value (enum machine_mode md_mode, rtx original,
149 enum reg_class rclass, const char *title)
150{
151 enum machine_mode mode;
152 rtx new_reg;
153
154 if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
155 mode = md_mode;
156 lra_assert (mode != VOIDmode);
157 new_reg = gen_reg_rtx (mode);
158 if (original == NULL_RTX || ! REG_P (original))
159 {
160 if (lra_dump_file != NULL)
161 fprintf (lra_dump_file, " Creating newreg=%i", REGNO (new_reg));
162 }
163 else
164 {
165 if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
166 ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
167 REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
168 REG_POINTER (new_reg) = REG_POINTER (original);
169 REG_ATTRS (new_reg) = REG_ATTRS (original);
170 if (lra_dump_file != NULL)
171 fprintf (lra_dump_file, " Creating newreg=%i from oldreg=%i",
172 REGNO (new_reg), REGNO (original));
173 }
174 if (lra_dump_file != NULL)
175 {
176 if (title != NULL)
177 fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
178 reg_class_names[rclass], *title == '\0' ? "" : " ",
179 title, REGNO (new_reg));
180 fprintf (lra_dump_file, "\n");
181 }
182 expand_reg_data ();
183 setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
184 return new_reg;
185}
186
187/* Analogous to the previous function but also inherits value of
188 ORIGINAL. */
189rtx
190lra_create_new_reg (enum machine_mode md_mode, rtx original,
191 enum reg_class rclass, const char *title)
192{
193 rtx new_reg;
194
195 new_reg
196 = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
197 if (original != NULL_RTX && REG_P (original))
198 lra_reg_info[REGNO (new_reg)].val = lra_reg_info[REGNO (original)].val;
199 return new_reg;
200}
201
 202/* Set up a new unique value for register REGNO. */
203void
204lra_set_regno_unique_value (int regno)
205{
206 lra_reg_info[regno].val = get_new_reg_value ();
207}
208
209/* Invalidate INSN related info used by LRA. */
210void
211lra_invalidate_insn_data (rtx insn)
212{
213 lra_invalidate_insn_regno_info (insn);
214 invalidate_insn_recog_data (INSN_UID (insn));
215}
216
217/* Mark INSN deleted and invalidate the insn related info used by
218 LRA. */
219void
220lra_set_insn_deleted (rtx insn)
221{
222 lra_invalidate_insn_data (insn);
223 SET_INSN_DELETED (insn);
224}
225
 226/* Delete an unneeded INSN and any previous insns whose sole purpose is
227 loading data that is dead in INSN. */
228void
229lra_delete_dead_insn (rtx insn)
230{
231 rtx prev = prev_real_insn (insn);
232 rtx prev_dest;
233
234 /* If the previous insn sets a register that dies in our insn,
235 delete it too. */
236 if (prev && GET_CODE (PATTERN (prev)) == SET
237 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
238 && reg_mentioned_p (prev_dest, PATTERN (insn))
239 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
240 && ! side_effects_p (SET_SRC (PATTERN (prev))))
241 lra_delete_dead_insn (prev);
242
243 lra_set_insn_deleted (insn);
244}
245
 246/* The target checks operands through operand predicates to recognize
 247 an insn. We should take special precautions to generate add insns,
 248 which are frequent results of elimination.
 249
 250 Emit insns for x = y + z. X can be used to store intermediate
 251 values, and therefore should not appear in Y or Z when X is used to
 252 store an intermediate value. Y + Z should form [base] [+ index[ * scale]]
 253 [+ disp] where base and index are registers, and disp and scale are
 254 constants. Y should contain base if it is present, and Z should
 255 contain disp if any. index[*scale] can be part of Y or Z. */
256void
257lra_emit_add (rtx x, rtx y, rtx z)
258{
259 int old;
260 rtx insn, last;
261 rtx a1, a2, base, index, disp, scale, index_scale;
262 bool ok_p;
263
264 insn = gen_add3_insn (x, y, z);
265 old = max_reg_num ();
266 if (insn != NULL_RTX)
267 emit_insn (insn);
268 else
269 {
270 disp = a2 = NULL_RTX;
271 if (GET_CODE (y) == PLUS)
272 {
273 a1 = XEXP (y, 0);
274 a2 = XEXP (y, 1);
275 disp = z;
276 }
277 else
278 {
279 a1 = y;
280 if (CONSTANT_P (z))
281 disp = z;
282 else
283 a2 = z;
284 }
285 index_scale = scale = NULL_RTX;
286 if (GET_CODE (a1) == MULT)
287 {
288 index_scale = a1;
289 index = XEXP (a1, 0);
290 scale = XEXP (a1, 1);
291 base = a2;
292 }
293 else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
294 {
295 index_scale = a2;
296 index = XEXP (a2, 0);
297 scale = XEXP (a2, 1);
298 base = a1;
299 }
300 else
301 {
302 base = a1;
303 index = a2;
304 }
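      /* For illustration only: with y = (plus (mult r1 4) r2) and
	 z = (const_int 16), the code above sets index = r1, scale = 4,
	 base = r2 and disp = 16.  */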
305 if (! REG_P (base)
306 || (index != NULL_RTX && ! REG_P (index))
307 || (disp != NULL_RTX && ! CONSTANT_P (disp))
308 || (scale != NULL_RTX && ! CONSTANT_P (scale)))
309 {
 310 /* It is not an address generation. Probably we have no 3-op
 311 add. The last chance is to use a 2-op add insn. */
312 lra_assert (x != y && x != z);
313 emit_move_insn (x, z);
314 insn = gen_add2_insn (x, y);
315 emit_insn (insn);
316 }
317 else
318 {
319 if (index_scale == NULL_RTX)
320 index_scale = index;
321 if (disp == NULL_RTX)
322 {
323 /* Generate x = index_scale; x = x + base. */
324 lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
325 emit_move_insn (x, index_scale);
326 insn = gen_add2_insn (x, base);
327 emit_insn (insn);
328 }
329 else if (scale == NULL_RTX)
330 {
331 /* Try x = base + disp. */
332 lra_assert (base != NULL_RTX);
333 last = get_last_insn ();
334 insn = emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base),
335 base, disp));
336 if (recog_memoized (insn) < 0)
337 {
338 delete_insns_since (last);
339 /* Generate x = disp; x = x + base. */
340 emit_move_insn (x, disp);
341 insn = gen_add2_insn (x, base);
342 emit_insn (insn);
343 }
344 /* Generate x = x + index. */
345 if (index != NULL_RTX)
346 {
347 insn = gen_add2_insn (x, index);
348 emit_insn (insn);
349 }
350 }
351 else
352 {
353 /* Try x = index_scale; x = x + disp; x = x + base. */
354 last = get_last_insn ();
355 insn = emit_move_insn (x, index_scale);
356 ok_p = false;
357 if (recog_memoized (insn) >= 0)
358 {
359 insn = gen_add2_insn (x, disp);
360 if (insn != NULL_RTX)
361 {
362 emit_insn (insn);
 363 insn = gen_add2_insn (x, base);
364 if (insn != NULL_RTX)
365 {
366 emit_insn (insn);
367 ok_p = true;
368 }
369 }
370 }
371 if (! ok_p)
372 {
373 delete_insns_since (last);
374 /* Generate x = disp; x = x + base; x = x + index_scale. */
375 emit_move_insn (x, disp);
376 insn = gen_add2_insn (x, base);
377 emit_insn (insn);
378 insn = gen_add2_insn (x, index_scale);
379 emit_insn (insn);
380 }
381 }
382 }
383 }
384 /* Functions emit_... can create pseudos -- so expand the pseudo
385 data. */
386 if (old != max_reg_num ())
387 expand_reg_data ();
388}
389
390/* The number of emitted reload insns so far. */
391int lra_curr_reload_num;
392
 393/* Emit x := y, processing the special cases when y = u + v or
 394 y = u + v * scale + w through lra_emit_add (Y can be an address
 395 which is base + index reg * scale + displacement in the general
 396 case). X may be used as an intermediate result, so it should not appear in Y. */
397void
398lra_emit_move (rtx x, rtx y)
399{
400 int old;
401
402 if (GET_CODE (y) != PLUS)
403 {
404 if (rtx_equal_p (x, y))
405 return;
406 old = max_reg_num ();
407 emit_move_insn (x, y);
408 if (REG_P (x))
409 lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
410 /* Function emit_move can create pseudos -- so expand the pseudo
411 data. */
412 if (old != max_reg_num ())
413 expand_reg_data ();
414 return;
415 }
416 lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
417}
418
 419/* Update insn operands which are duplicates of the operands whose
 420 numbers are given in the array NOPS (terminated by -1). The insn
 421 is represented by its LRA internal representation ID. */
422void
423lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
424{
425 int i, j, nop;
426 struct lra_static_insn_data *static_id = id->insn_static_data;
427
428 for (i = 0; i < static_id->n_dups; i++)
429 for (j = 0; (nop = nops[j]) >= 0; j++)
430 if (static_id->dup_num[i] == nop)
431 *id->dup_loc[i] = *id->operand_loc[nop];
432}
433
434\f
435
436/* This page contains code dealing with info about registers in the
437 insns. */
438
439/* Pools for insn reg info. */
440static alloc_pool insn_reg_pool;
441
 442/* Initialize the pool for insn reg info. */
443static void
444init_insn_regs (void)
445{
446 insn_reg_pool
447 = create_alloc_pool ("insn regs", sizeof (struct lra_insn_reg), 100);
448}
449
 450/* Create LRA insn related info about a reference to REGNO with TYPE
 451 (in/out/inout), biggest reference mode MODE, a flag that it is
 452 referenced through a subreg (SUBREG_P), a flag that it is early
 453 clobbered in the insn (EARLY_CLOBBER), and a reference to the next
 454 insn reg info (NEXT). */
455static struct lra_insn_reg *
456new_insn_reg (int regno, enum op_type type, enum machine_mode mode,
457 bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
458{
459 struct lra_insn_reg *ir;
460
461 ir = (struct lra_insn_reg *) pool_alloc (insn_reg_pool);
462 ir->type = type;
463 ir->biggest_mode = mode;
464 ir->subreg_p = subreg_p;
465 ir->early_clobber = early_clobber;
466 ir->regno = regno;
467 ir->next = next;
468 return ir;
469}
470
471/* Free insn reg info IR. */
472static void
473free_insn_reg (struct lra_insn_reg *ir)
474{
475 pool_free (insn_reg_pool, ir);
476}
477
478/* Free insn reg info list IR. */
479static void
480free_insn_regs (struct lra_insn_reg *ir)
481{
482 struct lra_insn_reg *next_ir;
483
484 for (; ir != NULL; ir = next_ir)
485 {
486 next_ir = ir->next;
487 free_insn_reg (ir);
488 }
489}
490
491/* Finish pool for insn reg info. */
492static void
493finish_insn_regs (void)
494{
495 free_alloc_pool (insn_reg_pool);
496}
497
498\f
499
 500/* This page contains code dealing with LRA insn info (or in other words
501 LRA internal insn representation). */
502
503struct target_lra_int default_target_lra_int;
504#if SWITCHABLE_TARGET
505struct target_lra_int *this_target_lra_int = &default_target_lra_int;
506#endif
507
 508/* Map INSN_CODE -> the static insn data. This info is valid for
 509 the whole translation unit. */
510struct lra_static_insn_data *insn_code_data[LAST_INSN_CODE];
511
512/* Debug insns are represented as a special insn with one input
513 operand which is RTL expression in var_location. */
514
515/* The following data are used as static insn operand data for all
516 debug insns. If structure lra_operand_data is changed, the
517 initializer should be changed too. */
518static struct lra_operand_data debug_operand_data =
519 {
520 NULL, /* alternative */
 521 VOIDmode, /* We are not interested in the operand mode. */
522 OP_IN,
523 0, 0, 0, 0
524 };
525
526/* The following data are used as static insn data for all debug
527 insns. If structure lra_static_insn_data is changed, the
528 initializer should be changed too. */
529static struct lra_static_insn_data debug_insn_static_data =
530 {
531 &debug_operand_data,
532 0, /* Duplication operands #. */
533 -1, /* Commutative operand #. */
534 1, /* Operands #. There is only one operand which is debug RTL
535 expression. */
536 0, /* Duplications #. */
 537 0, /* Alternatives #. We are not interested in alternatives
 538 because we do not process debug insns for reloads. */
539 NULL, /* Hard registers referenced in machine description. */
540 NULL /* Descriptions of operands in alternatives. */
541 };
542
 543/* Called once per compiler run to initialize some LRA data related
544 to insns. */
545static void
546init_insn_code_data_once (void)
547{
548 memset (insn_code_data, 0, sizeof (insn_code_data));
549 memset (op_alt_data, 0, sizeof (op_alt_data));
550}
551
 552/* Called once per compiler run to finalize some LRA data related to
553 insns. */
554static void
555finish_insn_code_data_once (void)
556{
557 int i;
558
559 for (i = 0; i < LAST_INSN_CODE; i++)
560 {
561 if (insn_code_data[i] != NULL)
562 free (insn_code_data[i]);
563 if (op_alt_data[i] != NULL)
564 free (op_alt_data[i]);
565 }
566}
567
568/* Initialize LRA info about operands in insn alternatives. */
569static void
570init_op_alt_data (void)
571{
572 int i;
573
574 for (i = 0; i < LAST_INSN_CODE; i++)
575 if (op_alt_data[i] != NULL)
576 {
577 free (op_alt_data[i]);
578 op_alt_data[i] = NULL;
579 }
580}
581
 582/* Return static insn data, allocating and setting it up if necessary.
 583 Although dup_num is static data (it depends only on icode), to set
 584 it up we need to extract the insn first. So recog_data should be
 585 valid for a normal insn (ICODE >= 0) before the call. */
586static struct lra_static_insn_data *
587get_static_insn_data (int icode, int nop, int ndup, int nalt)
588{
589 struct lra_static_insn_data *data;
590 size_t n_bytes;
591
592 lra_assert (icode < LAST_INSN_CODE);
593 if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
594 return data;
595 lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
596 n_bytes = sizeof (struct lra_static_insn_data)
597 + sizeof (struct lra_operand_data) * nop
598 + sizeof (int) * ndup;
599 data = XNEWVAR (struct lra_static_insn_data, n_bytes);
600 data->n_operands = nop;
601 data->n_dups = ndup;
602 data->n_alternatives = nalt;
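  /* The single allocation above holds the lra_static_insn_data
     structure itself, followed by NOP lra_operand_data entries and
     then NDUP ints for dup_num; set up the pointers into it.  */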
603 data->operand = ((struct lra_operand_data *)
604 ((char *) data + sizeof (struct lra_static_insn_data)));
605 data->dup_num = ((int *) ((char *) data->operand
606 + sizeof (struct lra_operand_data) * nop));
607 if (icode >= 0)
608 {
609 int i;
610
611 insn_code_data[icode] = data;
612 for (i = 0; i < nop; i++)
613 {
614 data->operand[i].constraint
615 = insn_data[icode].operand[i].constraint;
616 data->operand[i].mode = insn_data[icode].operand[i].mode;
617 data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
618 data->operand[i].is_operator
619 = insn_data[icode].operand[i].is_operator;
620 data->operand[i].type
621 = (data->operand[i].constraint[0] == '=' ? OP_OUT
622 : data->operand[i].constraint[0] == '+' ? OP_INOUT
623 : OP_IN);
624 data->operand[i].is_address = false;
625 }
626 for (i = 0; i < ndup; i++)
627 data->dup_num[i] = recog_data.dup_num[i];
628 }
629 return data;
630}
631
632/* The current length of the following array. */
633int lra_insn_recog_data_len;
634
635/* Map INSN_UID -> the insn recog data (NULL if unknown). */
636lra_insn_recog_data_t *lra_insn_recog_data;
637
638/* Initialize LRA data about insns. */
639static void
640init_insn_recog_data (void)
641{
642 lra_insn_recog_data_len = 0;
643 lra_insn_recog_data = NULL;
644 init_insn_regs ();
645}
646
647/* Expand, if necessary, LRA data about insns. */
648static void
649check_and_expand_insn_recog_data (int index)
650{
651 int i, old;
652
653 if (lra_insn_recog_data_len > index)
654 return;
655 old = lra_insn_recog_data_len;
656 lra_insn_recog_data_len = index * 3 / 2 + 1;
657 lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
658 lra_insn_recog_data,
659 lra_insn_recog_data_len);
660 for (i = old; i < lra_insn_recog_data_len; i++)
661 lra_insn_recog_data[i] = NULL;
662}
663
664/* Finish LRA DATA about insn. */
665static void
666free_insn_recog_data (lra_insn_recog_data_t data)
667{
668 if (data->operand_loc != NULL)
669 free (data->operand_loc);
670 if (data->dup_loc != NULL)
671 free (data->dup_loc);
672 if (data->arg_hard_regs != NULL)
673 free (data->arg_hard_regs);
674#ifdef HAVE_ATTR_enabled
675 if (data->alternative_enabled_p != NULL)
676 free (data->alternative_enabled_p);
677#endif
678 if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
679 {
680 if (data->insn_static_data->operand_alternative != NULL)
681 free (data->insn_static_data->operand_alternative);
682 free_insn_regs (data->insn_static_data->hard_regs);
683 free (data->insn_static_data);
684 }
685 free_insn_regs (data->regs);
686 data->regs = NULL;
687 free (data);
688}
689
690/* Finish LRA data about all insns. */
691static void
692finish_insn_recog_data (void)
693{
694 int i;
695 lra_insn_recog_data_t data;
696
697 for (i = 0; i < lra_insn_recog_data_len; i++)
698 if ((data = lra_insn_recog_data[i]) != NULL)
699 free_insn_recog_data (data);
700 finish_insn_regs ();
701 free (lra_insn_recog_data);
702}
703
 704/* Set up info about operands in alternatives of LRA DATA of the insn. */
705static void
706setup_operand_alternative (lra_insn_recog_data_t data)
707{
708 int i, nop, nalt;
709 int icode = data->icode;
710 struct lra_static_insn_data *static_data = data->insn_static_data;
711
712 if (icode >= 0
713 && (static_data->operand_alternative = op_alt_data[icode]) != NULL)
714 return;
715 static_data->commutative = -1;
716 nop = static_data->n_operands;
717 if (nop == 0)
718 {
719 static_data->operand_alternative = NULL;
720 return;
721 }
722 nalt = static_data->n_alternatives;
723 static_data->operand_alternative = XNEWVEC (struct operand_alternative,
724 nalt * nop);
725 memset (static_data->operand_alternative, 0,
726 nalt * nop * sizeof (struct operand_alternative));
727 if (icode >= 0)
728 op_alt_data[icode] = static_data->operand_alternative;
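  /* The loop below parses each operand constraint string, filling one
     operand_alternative entry per alternative.  E.g. "=r,m" (assuming
     'm' is TARGET_MEM_CONSTRAINT) gives cl == GENERAL_REGS for the
     first alternative and sets memory_ok for the second.  */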
729 for (i = 0; i < nop; i++)
730 {
731 int j;
732 struct operand_alternative *op_alt_start, *op_alt;
733 const char *p = static_data->operand[i].constraint;
734
735 static_data->operand[i].early_clobber = 0;
736 op_alt_start = &static_data->operand_alternative[i];
737
738 for (j = 0; j < nalt; j++)
739 {
740 op_alt = op_alt_start + j * nop;
741 op_alt->cl = NO_REGS;
742 op_alt->constraint = p;
743 op_alt->matches = -1;
744 op_alt->matched = -1;
745
746 if (*p == '\0' || *p == ',')
747 {
748 op_alt->anything_ok = 1;
749 continue;
750 }
751
752 for (;;)
753 {
754 char c = *p;
755 if (c == '#')
756 do
757 c = *++p;
758 while (c != ',' && c != '\0');
759 if (c == ',' || c == '\0')
760 {
761 p++;
762 break;
763 }
764
765 switch (c)
766 {
767 case '=': case '+': case '*':
768 case 'E': case 'F': case 'G': case 'H':
769 case 's': case 'i': case 'n':
770 case 'I': case 'J': case 'K': case 'L':
771 case 'M': case 'N': case 'O': case 'P':
772 /* These don't say anything we care about. */
773 break;
774
775 case '%':
776 /* We currently only support one commutative pair of
777 operands. */
778 if (static_data->commutative < 0)
779 static_data->commutative = i;
780 else
781 lra_assert (data->icode < 0); /* Asm */
782
783 /* The last operand should not be marked
784 commutative. */
785 lra_assert (i != nop - 1);
786 break;
787
788 case '?':
789 op_alt->reject += 6;
790 break;
791 case '!':
792 op_alt->reject += 600;
793 break;
794 case '&':
795 op_alt->earlyclobber = 1;
796 static_data->operand[i].early_clobber = 1;
797 break;
798
799 case '0': case '1': case '2': case '3': case '4':
800 case '5': case '6': case '7': case '8': case '9':
801 {
802 char *end;
803 op_alt->matches = strtoul (p, &end, 10);
804 static_data->operand_alternative
805 [j * nop + op_alt->matches].matched = i;
806 p = end;
807 }
808 continue;
809
810 case TARGET_MEM_CONSTRAINT:
811 op_alt->memory_ok = 1;
812 break;
813 case '<':
814 op_alt->decmem_ok = 1;
815 break;
816 case '>':
817 op_alt->incmem_ok = 1;
818 break;
819 case 'V':
820 op_alt->nonoffmem_ok = 1;
821 break;
822 case 'o':
823 op_alt->offmem_ok = 1;
824 break;
825 case 'X':
826 op_alt->anything_ok = 1;
827 break;
828
829 case 'p':
830 static_data->operand[i].is_address = true;
831 op_alt->is_address = 1;
832 op_alt->cl = (reg_class_subunion[(int) op_alt->cl]
833 [(int) base_reg_class (VOIDmode,
834 ADDR_SPACE_GENERIC,
835 ADDRESS, SCRATCH)]);
836 break;
837
838 case 'g':
839 case 'r':
840 op_alt->cl =
841 reg_class_subunion[(int) op_alt->cl][(int) GENERAL_REGS];
842 break;
843
844 default:
845 if (EXTRA_MEMORY_CONSTRAINT (c, p))
846 {
847 op_alt->memory_ok = 1;
848 break;
849 }
850 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
851 {
852 static_data->operand[i].is_address = true;
853 op_alt->is_address = 1;
854 op_alt->cl
855 = (reg_class_subunion
856 [(int) op_alt->cl]
857 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
858 ADDRESS, SCRATCH)]);
859 break;
860 }
861
862 op_alt->cl
863 = (reg_class_subunion
864 [(int) op_alt->cl]
865 [(int)
866 REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
867 break;
868 }
869 p += CONSTRAINT_LEN (c, p);
870 }
871 }
872 }
873}
874
 875/* Recursively process X and collect info about registers in X which
 876 are not insn operands, with TYPE (in/out/inout) and a flag saying
 877 whether the register is early clobbered in the insn (EARLY_CLOBBER),
 878 and add the info to LIST. X is a part of the insn given by DATA.
 879 Return the resulting list. */
880static struct lra_insn_reg *
881collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
882 struct lra_insn_reg *list,
883 enum op_type type, bool early_clobber)
884{
885 int i, j, regno, last;
886 bool subreg_p;
887 enum machine_mode mode;
888 struct lra_insn_reg *curr;
889 rtx op = *x;
890 enum rtx_code code = GET_CODE (op);
891 const char *fmt = GET_RTX_FORMAT (code);
892
893 for (i = 0; i < data->insn_static_data->n_operands; i++)
894 if (x == data->operand_loc[i])
895 /* It is an operand loc. Stop here. */
896 return list;
897 for (i = 0; i < data->insn_static_data->n_dups; i++)
898 if (x == data->dup_loc[i])
899 /* It is a dup loc. Stop here. */
900 return list;
901 mode = GET_MODE (op);
902 subreg_p = false;
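  /* For a narrowing SUBREG, the code below tracks the inner register's
     mode as the biggest reference mode and marks the reference as a
     subreg one only when that mode is wider than a natural-size
     register.  */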
903 if (code == SUBREG)
904 {
905 op = SUBREG_REG (op);
906 code = GET_CODE (op);
907 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
908 {
909 mode = GET_MODE (op);
910 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
911 subreg_p = true;
912 }
913 }
914 if (REG_P (op))
915 {
916 if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
917 return list;
918 for (last = regno + hard_regno_nregs[regno][mode];
919 regno < last;
920 regno++)
921 if (! TEST_HARD_REG_BIT (lra_no_alloc_regs, regno))
922 {
923 for (curr = list; curr != NULL; curr = curr->next)
924 if (curr->regno == regno && curr->subreg_p == subreg_p
925 && curr->biggest_mode == mode)
926 {
927 if (curr->type != type)
928 curr->type = OP_INOUT;
929 if (curr->early_clobber != early_clobber)
930 curr->early_clobber = true;
931 break;
932 }
933 if (curr == NULL)
934 {
935 /* This is a new hard regno or the info can not be
936 integrated into the found structure. */
937#ifdef STACK_REGS
938 early_clobber
939 = (early_clobber
 940 /* This clobber is only to inform about popping the
 941 floating point stack. */
942 && ! (FIRST_STACK_REG <= regno
943 && regno <= LAST_STACK_REG));
944#endif
945 list = new_insn_reg (regno, type, mode, subreg_p,
946 early_clobber, list);
947 }
948 }
949 return list;
950 }
951 switch (code)
952 {
953 case SET:
954 list = collect_non_operand_hard_regs (&SET_DEST (op), data,
955 list, OP_OUT, false);
956 list = collect_non_operand_hard_regs (&SET_SRC (op), data,
957 list, OP_IN, false);
958 break;
959 case CLOBBER:
960 /* We treat clobber of non-operand hard registers as early
1a8f8886 961 clobber (the behavior is expected from asm). */
c6a6cdaa 962 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
963 list, OP_OUT, true);
964 break;
965 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
966 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
967 list, OP_INOUT, false);
968 break;
969 case PRE_MODIFY: case POST_MODIFY:
970 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
971 list, OP_INOUT, false);
972 list = collect_non_operand_hard_regs (&XEXP (op, 1), data,
973 list, OP_IN, false);
974 break;
975 default:
976 fmt = GET_RTX_FORMAT (code);
977 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
978 {
979 if (fmt[i] == 'e')
980 list = collect_non_operand_hard_regs (&XEXP (op, i), data,
981 list, OP_IN, false);
982 else if (fmt[i] == 'E')
983 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
984 list = collect_non_operand_hard_regs (&XVECEXP (op, i, j), data,
985 list, OP_IN, false);
986 }
987 }
988 return list;
989}
990
 991/* Return info about INSN, setting the info up if it has not been
 992 set up yet. */
993lra_insn_recog_data_t
994lra_set_insn_recog_data (rtx insn)
995{
996 lra_insn_recog_data_t data;
997 int i, n, icode;
998 rtx **locs;
999 unsigned int uid = INSN_UID (insn);
1000 struct lra_static_insn_data *insn_static_data;
1001
1002 check_and_expand_insn_recog_data (uid);
1003 if (DEBUG_INSN_P (insn))
1004 icode = -1;
1005 else
1006 {
1007 icode = INSN_CODE (insn);
1008 if (icode < 0)
1009 /* It might be a new simple insn which is not recognized yet. */
1010 INSN_CODE (insn) = icode = recog_memoized (insn);
1011 }
1012 data = XNEW (struct lra_insn_recog_data);
1013 lra_insn_recog_data[uid] = data;
1014 data->insn = insn;
1015 data->used_insn_alternative = -1;
1016 data->icode = icode;
1017 data->regs = NULL;
1018 if (DEBUG_INSN_P (insn))
1019 {
1020 data->insn_static_data = &debug_insn_static_data;
1021 data->dup_loc = NULL;
1022 data->arg_hard_regs = NULL;
1023#ifdef HAVE_ATTR_enabled
1024 data->alternative_enabled_p = NULL;
1025#endif
1026 data->operand_loc = XNEWVEC (rtx *, 1);
1027 data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
1028 return data;
1029 }
1030 if (icode < 0)
1031 {
1032 int nop;
1033 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1034 const char *constraints[MAX_RECOG_OPERANDS];
1035
1036 nop = asm_noperands (PATTERN (insn));
1037 data->operand_loc = data->dup_loc = NULL;
1038 if (nop < 0)
 1039 /* It is a special insn like USE or CLOBBER. */
1040 data->insn_static_data = insn_static_data
1041 = get_static_insn_data (-1, 0, 0, 1);
1042 else
1043 {
1044 /* expand_asm_operands makes sure there aren't too many
1045 operands. */
1046 lra_assert (nop <= MAX_RECOG_OPERANDS);
1047 if (nop != 0)
1048 data->operand_loc = XNEWVEC (rtx *, nop);
1049 /* Now get the operand values and constraints out of the
1050 insn. */
1051 decode_asm_operands (PATTERN (insn), NULL,
1052 data->operand_loc,
1053 constraints, operand_mode, NULL);
1054 n = 1;
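	  /* Count the asm alternatives by counting the commas in the
	     first constraint string; e.g. "r,m,i" gives n == 3.  */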
1055 if (nop > 0)
1056 {
 1057 const char *p;
1a8f8886 1058
c6a6cdaa 1059 for (p = constraints[0]; *p; p++)
1060 n += *p == ',';
1061 }
1062 data->insn_static_data = insn_static_data
1063 = get_static_insn_data (-1, nop, 0, n);
1064 for (i = 0; i < nop; i++)
1065 {
1066 insn_static_data->operand[i].mode = operand_mode[i];
1067 insn_static_data->operand[i].constraint = constraints[i];
1068 insn_static_data->operand[i].strict_low = false;
1069 insn_static_data->operand[i].is_operator = false;
1070 insn_static_data->operand[i].is_address = false;
1071 }
1072 }
1073 for (i = 0; i < insn_static_data->n_operands; i++)
1074 insn_static_data->operand[i].type
1075 = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1076 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1077 : OP_IN);
1078#ifdef HAVE_ATTR_enabled
1079 data->alternative_enabled_p = NULL;
1080#endif
1081 }
1082 else
1083 {
1084 insn_extract (insn);
1085 data->insn_static_data = insn_static_data
1086 = get_static_insn_data (icode, insn_data[icode].n_operands,
1087 insn_data[icode].n_dups,
1088 insn_data[icode].n_alternatives);
1089 n = insn_static_data->n_operands;
1090 if (n == 0)
1091 locs = NULL;
1092 else
1093 {
1094 locs = XNEWVEC (rtx *, n);
1095 memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
1096 }
1097 data->operand_loc = locs;
1098 n = insn_static_data->n_dups;
1099 if (n == 0)
1100 locs = NULL;
1101 else
1102 {
1103 locs = XNEWVEC (rtx *, n);
1104 memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
1105 }
1106 data->dup_loc = locs;
1107#ifdef HAVE_ATTR_enabled
1108 {
1109 bool *bp;
1110
1111 n = insn_static_data->n_alternatives;
1112 lra_assert (n >= 0);
1113 data->alternative_enabled_p = bp = XNEWVEC (bool, n);
1114 /* Cache the insn because we don't want to call extract_insn
1115 from get_attr_enabled as extract_insn modifies
1116 which_alternative. The attribute enabled should not depend
1117 on insn operands, operand modes, operand types, and operand
 1118 constraints. It should depend only on the architecture. If
 1119 that is not true, we should rewrite the code in this file to
 1120 use extract_insn instead of the less expensive insn_extract. */
1121 recog_data.insn = insn;
1122 for (i = 0; i < n; i++)
1123 {
1124 which_alternative = i;
1125 bp[i] = get_attr_enabled (insn);
1126 }
1127 }
1128#endif
1129 }
1130 if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
1131 insn_static_data->hard_regs = NULL;
1132 else
1133 insn_static_data->hard_regs
1134 = collect_non_operand_hard_regs (&PATTERN (insn), data,
1135 NULL, OP_IN, false);
1136 setup_operand_alternative (data);
1137 data->arg_hard_regs = NULL;
1138 if (CALL_P (insn))
1139 {
1140 rtx link;
1141 int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];
1142
1143 n_hard_regs = 0;
 1144 /* Find implicit hard register usage. We believe it will not
 1145 be changed by whatever transformations are used. Call insns
 1146 are such an example. */
1147 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1148 link != NULL_RTX;
1149 link = XEXP (link, 1))
1150 if (GET_CODE (XEXP (link, 0)) == USE
1151 && REG_P (XEXP (XEXP (link, 0), 0)))
1152 {
1153 regno = REGNO (XEXP (XEXP (link, 0), 0));
1154 lra_assert (regno < FIRST_PSEUDO_REGISTER);
1155 /* It is an argument register. */
1156 for (i = (hard_regno_nregs
1157 [regno][GET_MODE (XEXP (XEXP (link, 0), 0))]) - 1;
1158 i >= 0;
1159 i--)
1160 arg_hard_regs[n_hard_regs++] = regno + i;
1161 }
1162 if (n_hard_regs != 0)
1163 {
1164 arg_hard_regs[n_hard_regs++] = -1;
1165 data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
1166 memcpy (data->arg_hard_regs, arg_hard_regs,
1167 sizeof (int) * n_hard_regs);
1168 }
1169 }
 1170 /* Some output operands can be recognized only from the context,
 1171 not from the constraints, which are empty in this case. A call
 1172 insn may contain a hard register in a set destination with an
 1173 empty constraint, and extract_insn treats it as an input. */
1174 for (i = 0; i < insn_static_data->n_operands; i++)
1175 {
1176 int j;
1177 rtx pat, set;
1178 struct lra_operand_data *operand = &insn_static_data->operand[i];
1179
 1180 /* ??? Should we treat 'X' the same way? It looks to me that
 1181 'X' means anything and an empty constraint means we do not
 1182 care. */
1183 if (operand->type != OP_IN || *operand->constraint != '\0'
1184 || operand->is_operator)
1185 continue;
1186 pat = PATTERN (insn);
1187 if (GET_CODE (pat) == SET)
1188 {
1189 if (data->operand_loc[i] != &SET_DEST (pat))
1190 continue;
1191 }
1192 else if (GET_CODE (pat) == PARALLEL)
1193 {
1194 for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
1195 {
1196 set = XVECEXP (PATTERN (insn), 0, j);
1197 if (GET_CODE (set) == SET
1198 && &SET_DEST (set) == data->operand_loc[i])
1199 break;
1200 }
1201 if (j < 0)
1202 continue;
1203 }
1204 else
1205 continue;
1206 operand->type = OP_OUT;
1207 }
1208 return data;
1209}
1210
 1211/* Return info about the insn given by UID. The info should already
 1212 be set up. */
1213static lra_insn_recog_data_t
1214get_insn_recog_data_by_uid (int uid)
1215{
1216 lra_insn_recog_data_t data;
1217
1218 data = lra_insn_recog_data[uid];
1219 lra_assert (data != NULL);
1220 return data;
1221}
1222
1223/* Invalidate all info about insn given by its UID. */
1224static void
1225invalidate_insn_recog_data (int uid)
1226{
1227 lra_insn_recog_data_t data;
1228
1229 data = lra_insn_recog_data[uid];
1230 lra_assert (data != NULL);
1231 free_insn_recog_data (data);
1232 lra_insn_recog_data[uid] = NULL;
1233}
1234
1235/* Update all the insn info about INSN. It is usually called when
1236 something in the insn was changed. Return the updated info. */
1237lra_insn_recog_data_t
1238lra_update_insn_recog_data (rtx insn)
1239{
1240 lra_insn_recog_data_t data;
1241 int n;
1242 unsigned int uid = INSN_UID (insn);
1243 struct lra_static_insn_data *insn_static_data;
1a8f8886 1244
c6a6cdaa 1245 check_and_expand_insn_recog_data (uid);
1246 if ((data = lra_insn_recog_data[uid]) != NULL
1247 && data->icode != INSN_CODE (insn))
1248 {
1249 invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
1250 invalidate_insn_recog_data (uid);
1251 data = NULL;
1252 }
1253 if (data == NULL)
1254 return lra_get_insn_recog_data (insn);
1255 insn_static_data = data->insn_static_data;
1256 data->used_insn_alternative = -1;
1257 if (DEBUG_INSN_P (insn))
1258 return data;
1259 if (data->icode < 0)
1260 {
1261 int nop;
1262 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1263 const char *constraints[MAX_RECOG_OPERANDS];
1264
1265 nop = asm_noperands (PATTERN (insn));
1266 if (nop >= 0)
1267 {
1268 lra_assert (nop == data->insn_static_data->n_operands);
1269 /* Now get the operand values and constraints out of the
1270 insn. */
1271 decode_asm_operands (PATTERN (insn), NULL,
1272 data->operand_loc,
1273 constraints, operand_mode, NULL);
1274#ifdef ENABLE_CHECKING
1275 {
1276 int i;
1277
1278 for (i = 0; i < nop; i++)
1279 lra_assert
1280 (insn_static_data->operand[i].mode == operand_mode[i]
1281 && insn_static_data->operand[i].constraint == constraints[i]
1282 && ! insn_static_data->operand[i].is_operator);
1283 }
1284#endif
1285 }
1286#ifdef ENABLE_CHECKING
1287 {
1288 int i;
1289
1290 for (i = 0; i < insn_static_data->n_operands; i++)
1291 lra_assert
1292 (insn_static_data->operand[i].type
1293 == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1294 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1295 : OP_IN));
1296 }
1297#endif
1298 }
1299 else
1300 {
1301 insn_extract (insn);
1302 n = insn_static_data->n_operands;
1303 if (n != 0)
1304 memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
1305 n = insn_static_data->n_dups;
1306 if (n != 0)
1307 memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
1308#ifdef HAVE_ATTR_enabled
1309#ifdef ENABLE_CHECKING
1310 {
1311 int i;
1312 bool *bp;
1a8f8886 1313
c6a6cdaa 1314 n = insn_static_data->n_alternatives;
1315 bp = data->alternative_enabled_p;
1316 lra_assert (n >= 0 && bp != NULL);
1317 /* Cache the insn to prevent extract_insn call from
1318 get_attr_enabled. */
1319 recog_data.insn = insn;
1320 for (i = 0; i < n; i++)
1321 {
1322 which_alternative = i;
1323 lra_assert (bp[i] == get_attr_enabled (insn));
1324 }
1325 }
1326#endif
1327#endif
1328 }
1329 return data;
1330}
1331
 1332/* Record that INSN is now using alternative ALT. */
1333void
1334lra_set_used_insn_alternative (rtx insn, int alt)
1335{
1336 lra_insn_recog_data_t data;
1337
1338 data = lra_get_insn_recog_data (insn);
1339 data->used_insn_alternative = alt;
1340}
1341
 1342/* Record that the insn with UID is now using alternative ALT. The
 1343 insn info should already be set up. */
1344void
1345lra_set_used_insn_alternative_by_uid (int uid, int alt)
1346{
1347 lra_insn_recog_data_t data;
1348
1349 check_and_expand_insn_recog_data (uid);
1350 data = lra_insn_recog_data[uid];
1351 lra_assert (data != NULL);
1352 data->used_insn_alternative = alt;
1353}
1354
1355\f
1356
1357/* This page contains code dealing with common register info and
1358 pseudo copies. */
1359
1360/* The size of the following array. */
1361static int reg_info_size;
1362/* Common info about each register. */
1363struct lra_reg *lra_reg_info;
1364
1365/* Last register value. */
1366static int last_reg_value;
1367
1368/* Return new register value. */
1369static int
1370get_new_reg_value (void)
1371{
1372 return ++last_reg_value;
1373}
1374
1375/* Pools for copies. */
1376static alloc_pool copy_pool;
1377
1378DEF_VEC_P(lra_copy_t);
1379DEF_VEC_ALLOC_P(lra_copy_t, heap);
1380
1381/* Vec referring to pseudo copies. */
1382static VEC(lra_copy_t,heap) *copy_vec;
1383
1384/* Initialize I-th element of lra_reg_info. */
1385static inline void
1386initialize_lra_reg_info_element (int i)
1387{
1388 bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
1389#ifdef STACK_REGS
1390 lra_reg_info[i].no_stack_p = false;
1391#endif
1392 CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
1393 lra_reg_info[i].preferred_hard_regno1 = -1;
1394 lra_reg_info[i].preferred_hard_regno2 = -1;
1395 lra_reg_info[i].preferred_hard_regno_profit1 = 0;
1396 lra_reg_info[i].preferred_hard_regno_profit2 = 0;
1397 lra_reg_info[i].live_ranges = NULL;
1398 lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
1399 lra_reg_info[i].last_reload = 0;
1400 lra_reg_info[i].restore_regno = -1;
1401 lra_reg_info[i].val = get_new_reg_value ();
1402 lra_reg_info[i].copies = NULL;
1403}
1404
1405/* Initialize common reg info and copies. */
1406static void
1407init_reg_info (void)
1408{
1409 int i;
1410
1411 last_reg_value = 0;
1412 reg_info_size = max_reg_num () * 3 / 2 + 1;
1413 lra_reg_info = XNEWVEC (struct lra_reg, reg_info_size);
1414 for (i = 0; i < reg_info_size; i++)
1415 initialize_lra_reg_info_element (i);
1416 copy_pool
1417 = create_alloc_pool ("lra copies", sizeof (struct lra_copy), 100);
1418 copy_vec = VEC_alloc (lra_copy_t, heap, 100);
1419}
1420
1421
1422/* Finish common reg info and copies. */
1423static void
1424finish_reg_info (void)
1425{
1426 int i;
1427
1428 for (i = 0; i < reg_info_size; i++)
1429 bitmap_clear (&lra_reg_info[i].insn_bitmap);
1430 free (lra_reg_info);
1431 reg_info_size = 0;
1432 free_alloc_pool (copy_pool);
1433 VEC_free (lra_copy_t, heap, copy_vec);
1434}
1435
1436/* Expand common reg info if it is necessary. */
1437static void
1438expand_reg_info (void)
1439{
1440 int i, old = reg_info_size;
1441
1442 if (reg_info_size > max_reg_num ())
1443 return;
1444 reg_info_size = max_reg_num () * 3 / 2 + 1;
1445 lra_reg_info = XRESIZEVEC (struct lra_reg, lra_reg_info, reg_info_size);
1446 for (i = old; i < reg_info_size; i++)
1447 initialize_lra_reg_info_element (i);
1448}
1449
1450/* Free all copies. */
1451void
1452lra_free_copies (void)
1453{
1454 lra_copy_t cp;
1455
1456 while (VEC_length (lra_copy_t, copy_vec) != 0)
1457 {
1458 cp = VEC_pop (lra_copy_t, copy_vec);
1459 lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
1460 pool_free (copy_pool, cp);
1461 }
1462}
1463
1464/* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1465 frequency is FREQ. */
1466void
1467lra_create_copy (int regno1, int regno2, int freq)
1468{
1469 bool regno1_dest_p;
1470 lra_copy_t cp;
1471
1472 lra_assert (regno1 != regno2);
1473 regno1_dest_p = true;
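  /* Canonicalize the copy so that regno1 < regno2; regno1_dest_p
     records whether the canonical regno1 is the destination of the
     copy.  */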
1474 if (regno1 > regno2)
1475 {
1476 int temp = regno2;
1477
1478 regno1_dest_p = false;
1479 regno2 = regno1;
1480 regno1 = temp;
1481 }
1482 cp = (lra_copy_t) pool_alloc (copy_pool);
1483 VEC_safe_push (lra_copy_t, heap, copy_vec, cp);
1484 cp->regno1_dest_p = regno1_dest_p;
1485 cp->freq = freq;
1486 cp->regno1 = regno1;
1487 cp->regno2 = regno2;
1488 cp->regno1_next = lra_reg_info[regno1].copies;
1489 lra_reg_info[regno1].copies = cp;
1490 cp->regno2_next = lra_reg_info[regno2].copies;
1491 lra_reg_info[regno2].copies = cp;
1492 if (lra_dump_file != NULL)
1493 fprintf (lra_dump_file, " Creating copy r%d%sr%d@%d\n",
1494 regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
1495}
1496
1497/* Return N-th (0, 1, ...) copy. If there is no copy, return
1498 NULL. */
1499lra_copy_t
1500lra_get_copy (int n)
1501{
1502 if (n >= (int) VEC_length (lra_copy_t, copy_vec))
1503 return NULL;
1504 return VEC_index (lra_copy_t, copy_vec, n);
1505}
1506
1507\f
1508
1509/* This page contains code dealing with info about registers in
1510 insns. */
1511
 1512/* Process X of the insn with UID recursively and add info (the
 1513 operand type is given by TYPE, the early clobber flag by
 1514 EARLY_CLOBBER) about the registers in X to the insn DATA. */
1515static void
1516add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
1517 enum op_type type, bool early_clobber)
1518{
1519 int i, j, regno;
1520 bool subreg_p;
1521 enum machine_mode mode;
1522 const char *fmt;
1523 enum rtx_code code;
1524 struct lra_insn_reg *curr;
1525
1526 code = GET_CODE (x);
1527 mode = GET_MODE (x);
1528 subreg_p = false;
1529 if (GET_CODE (x) == SUBREG)
1530 {
1531 x = SUBREG_REG (x);
1532 code = GET_CODE (x);
1533 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1534 {
1535 mode = GET_MODE (x);
1536 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
1537 subreg_p = true;
1538 }
1539 }
1540 if (REG_P (x))
1541 {
1542 regno = REGNO (x);
1543 expand_reg_info ();
1544 if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
1545 {
1546 data->regs = new_insn_reg (regno, type, mode, subreg_p,
1547 early_clobber, data->regs);
1548 return;
1549 }
1550 else
1551 {
1552 for (curr = data->regs; curr != NULL; curr = curr->next)
1553 if (curr->regno == regno)
1554 {
1555 if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
1556 /* The info can not be integrated into the found
1557 structure. */
1558 data->regs = new_insn_reg (regno, type, mode, subreg_p,
1559 early_clobber, data->regs);
1560 else
1561 {
1562 if (curr->type != type)
1563 curr->type = OP_INOUT;
1564 if (curr->early_clobber != early_clobber)
1565 curr->early_clobber = true;
1566 }
1567 return;
1568 }
1569 gcc_unreachable ();
1570 }
1571 }
1572
1573 switch (code)
1574 {
1575 case SET:
1576 add_regs_to_insn_regno_info (data, SET_DEST (x), uid, OP_OUT, false);
1577 add_regs_to_insn_regno_info (data, SET_SRC (x), uid, OP_IN, false);
1578 break;
1579 case CLOBBER:
1580 /* We treat clobber of non-operand hard registers as early
1a8f8886 1581 clobber (the behavior is expected from asm). */
c6a6cdaa 1582 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_OUT, true);
1583 break;
1584 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
1585 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1586 break;
1587 case PRE_MODIFY: case POST_MODIFY:
1588 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1589 add_regs_to_insn_regno_info (data, XEXP (x, 1), uid, OP_IN, false);
1590 break;
1591 default:
1592 if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
1593 /* Some targets place small structures in registers for return
1594 values of functions, and those registers are wrapped in
1595 PARALLEL that we may see as the destination of a SET. Here
1596 is an example:
1597
1598 (call_insn 13 12 14 2 (set (parallel:BLK [
1599 (expr_list:REG_DEP_TRUE (reg:DI 0 ax)
1600 (const_int 0 [0]))
1601 (expr_list:REG_DEP_TRUE (reg:DI 1 dx)
1602 (const_int 8 [0x8]))
1603 ])
1604 (call (mem:QI (symbol_ref:DI (... */
1605 type = OP_IN;
1606 fmt = GET_RTX_FORMAT (code);
1607 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1608 {
1609 if (fmt[i] == 'e')
1610 add_regs_to_insn_regno_info (data, XEXP (x, i), uid, type, false);
1611 else if (fmt[i] == 'E')
1612 {
1613 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1614 add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), uid,
1615 type, false);
1616 }
1617 }
1618 }
1619}
1620
1621/* Return execution frequency of INSN. */
1622static int
1623get_insn_freq (rtx insn)
1624{
1625 basic_block bb;
1626
1627 if ((bb = BLOCK_FOR_INSN (insn)) != NULL)
1628 return REG_FREQ_FROM_BB (bb);
1629 else
1630 {
1631 lra_assert (lra_insn_recog_data[INSN_UID (insn)]
1632 ->insn_static_data->n_operands == 0);
1633 /* We don't care about such insn, e.g. it might be jump with
1634 addr_vec. */
1635 return 1;
1636 }
1637}
1638
1639/* Invalidate all reg info of INSN with DATA and execution frequency
1640 FREQ. Update common info about the invalidated registers. */
1641static void
1642invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx insn,
1643 int freq)
1644{
1645 int uid;
1646 bool debug_p;
1647 unsigned int i;
1648 struct lra_insn_reg *ir, *next_ir;
1649
1650 uid = INSN_UID (insn);
1651 debug_p = DEBUG_INSN_P (insn);
1652 for (ir = data->regs; ir != NULL; ir = next_ir)
1653 {
1654 i = ir->regno;
1655 next_ir = ir->next;
1656 free_insn_reg (ir);
1657 bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
1658 if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
1659 {
1660 lra_reg_info[i].nrefs--;
1661 lra_reg_info[i].freq -= freq;
1662 lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
1663 }
1664 }
1665 data->regs = NULL;
1666}
1667
1668/* Invalidate all reg info of INSN. Update common info about the
1669 invalidated registers. */
1670void
1671lra_invalidate_insn_regno_info (rtx insn)
1672{
1673 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1674 get_insn_freq (insn));
1675}
1676
1677/* Update common reg info from reg info of insn given by its DATA and
1678 execution frequency FREQ. */
1679static void
1680setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1681{
1682 unsigned int i;
1683 struct lra_insn_reg *ir;
1684
1685 for (ir = data->regs; ir != NULL; ir = ir->next)
1686 if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1687 {
1688 lra_reg_info[i].nrefs++;
1689 lra_reg_info[i].freq += freq;
1690 }
1691}
1692
1693/* Set up insn reg info of INSN. Update common reg info from reg info
1694 of INSN. */
1695void
1696lra_update_insn_regno_info (rtx insn)
1697{
1698 int i, uid, freq;
1699 lra_insn_recog_data_t data;
1700 struct lra_static_insn_data *static_data;
1701 enum rtx_code code;
1702
1703 if (! INSN_P (insn))
1704 return;
1705 data = lra_get_insn_recog_data (insn);
1706 static_data = data->insn_static_data;
1707 freq = get_insn_freq (insn);
1708 invalidate_insn_data_regno_info (data, insn, freq);
1709 uid = INSN_UID (insn);
1710 for (i = static_data->n_operands - 1; i >= 0; i--)
1711 add_regs_to_insn_regno_info (data, *data->operand_loc[i], uid,
1712 static_data->operand[i].type,
1713 static_data->operand[i].early_clobber);
1714 if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
1715 add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), uid,
1716 code == USE ? OP_IN : OP_OUT, false);
1717 if (NONDEBUG_INSN_P (insn))
1718 setup_insn_reg_info (data, freq);
1719}
1720
 1721/* Return reg info of the insn given by its UID. */
1722struct lra_insn_reg *
1723lra_get_insn_regs (int uid)
1724{
1725 lra_insn_recog_data_t data;
1726
1727 data = get_insn_recog_data_by_uid (uid);
1728 return data->regs;
1729}
1730
1731\f
1732
1733/* This page contains code dealing with stack of the insns which
1734 should be processed by the next constraint pass. */
1735
 1736/* Bitmap used to ensure an insn is put on the stack only once. */
1737static sbitmap lra_constraint_insn_stack_bitmap;
1738
1739/* The stack itself. */
1740VEC (rtx, heap) *lra_constraint_insn_stack;
1741
1742/* Put INSN on the stack. If ALWAYS_UPDATE is true, always update the reg
1743 info for INSN, otherwise only update it if INSN is not already on the
1744 stack. */
1745static inline void
1746lra_push_insn_1 (rtx insn, bool always_update)
1747{
1748 unsigned int uid = INSN_UID (insn);
1749 if (always_update)
1750 lra_update_insn_regno_info (insn);
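  /* Grow the stack bitmap to 1.5 * UID so that pushing insns with
     slightly larger UIDs does not resize it every time.  */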
1751 if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
1752 lra_constraint_insn_stack_bitmap =
1753 sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
08b7917c 1754 if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
c6a6cdaa 1755 return;
08b7917c 1756 bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
c6a6cdaa 1757 if (! always_update)
1758 lra_update_insn_regno_info (insn);
1759 VEC_safe_push (rtx, heap, lra_constraint_insn_stack, insn);
1760}
1761
1762/* Put INSN on the stack. */
1763void
1764lra_push_insn (rtx insn)
1765{
1766 lra_push_insn_1 (insn, false);
1767}
1768
1769/* Put INSN on the stack and update its reg info. */
1770void
1771lra_push_insn_and_update_insn_regno_info (rtx insn)
1772{
1773 lra_push_insn_1 (insn, true);
1774}
1775
1776/* Put insn with UID on the stack. */
1777void
1778lra_push_insn_by_uid (unsigned int uid)
1779{
1780 lra_push_insn (lra_insn_recog_data[uid]->insn);
1781}
1782
 1783/* Take the last-inserted insn off the stack and return it. */
1784rtx
1785lra_pop_insn (void)
1786{
1787 rtx insn = VEC_pop (rtx, lra_constraint_insn_stack);
08b7917c 1788 bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
c6a6cdaa 1789 return insn;
1790}
1791
1792/* Return the current size of the insn stack. */
1793unsigned int
1794lra_insn_stack_length (void)
1795{
1796 return VEC_length (rtx, lra_constraint_insn_stack);
1797}
1798
 1799/* Push insns from FROM to TO (excluding TO), going in reverse order. */
1800static void
1801push_insns (rtx from, rtx to)
1802{
1803 rtx insn;
1804
1805 if (from == NULL_RTX)
1806 return;
1807 for (insn = from; insn != to; insn = PREV_INSN (insn))
1808 if (INSN_P (insn))
1809 lra_push_insn (insn);
1810}
1811
1812/* Emit insns BEFORE before INSN and insns AFTER after INSN. Put the
 1813 insns onto the stack. Print a message about emitting the insns
 1814 using TITLE. */
1815void
1816lra_process_new_insns (rtx insn, rtx before, rtx after, const char *title)
1817{
1818 rtx last;
1819
1820 if (lra_dump_file != NULL && (before != NULL_RTX || after != NULL_RTX))
1821 {
1822 debug_rtl_slim (lra_dump_file, insn, insn, -1, 0);
1823 if (before != NULL_RTX)
1824 {
1825 fprintf (lra_dump_file," %s before:\n", title);
1826 debug_rtl_slim (lra_dump_file, before, NULL_RTX, -1, 0);
1827 }
1828 if (after != NULL_RTX)
1829 {
1830 fprintf (lra_dump_file, " %s after:\n", title);
1831 debug_rtl_slim (lra_dump_file, after, NULL_RTX, -1, 0);
1832 }
1833 fprintf (lra_dump_file, "\n");
1834 }
1835 if (before != NULL_RTX)
1836 {
1837 emit_insn_before (before, insn);
1838 push_insns (PREV_INSN (insn), PREV_INSN (before));
1839 }
1840 if (after != NULL_RTX)
1841 {
1842 for (last = after; NEXT_INSN (last) != NULL_RTX; last = NEXT_INSN (last))
1843 ;
1844 emit_insn_after (after, insn);
1845 push_insns (last, insn);
1846 }
1847}
1848
1849\f
1850
 1851/* This page contains code dealing with scratches (changing them into
 1852 pseudos and restoring them from the pseudos).
1853
1854 We change scratches into pseudos at the beginning of LRA to
1855 simplify dealing with them (conflicts, hard register assignments).
1856
1857 If the pseudo denoting scratch was spilled it means that we do need
1858 a hard register for it. Such pseudos are transformed back to
1859 scratches at the end of LRA. */
1860
1861/* Description of location of a former scratch operand. */
453f1a8c 1862struct sloc
c6a6cdaa 1863{
1864 rtx insn; /* Insn where the scratch was. */
1865 int nop; /* Number of the operand which was a scratch. */
1866};
1867
453f1a8c 1868typedef struct sloc *sloc_t;
c6a6cdaa 1869
453f1a8c 1870DEF_VEC_P(sloc_t);
1871DEF_VEC_ALLOC_P(sloc_t, heap);
c6a6cdaa 1872
1873/* Locations of the former scratches. */
453f1a8c 1874static VEC (sloc_t, heap) *scratches;
c6a6cdaa 1875
1876/* Bitmap of scratch regnos. */
1877static bitmap_head scratch_bitmap;
1878
1879/* Bitmap of scratch operands. */
1880static bitmap_head scratch_operand_bitmap;
1881
 1882/* Return true if pseudo REGNO was made from a SCRATCH. */
1883bool
1884lra_former_scratch_p (int regno)
1885{
1886 return bitmap_bit_p (&scratch_bitmap, regno);
1887}
1888
1889/* Return true if the operand NOP of INSN is a former scratch. */
1890bool
1891lra_former_scratch_operand_p (rtx insn, int nop)
1892{
1893 return bitmap_bit_p (&scratch_operand_bitmap,
1894 INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
1895}
1896
1897/* Change scratches onto pseudos and save their location. */
1898static void
1899remove_scratches (void)
1900{
1901 int i;
1902 bool insn_changed_p;
1903 basic_block bb;
1904 rtx insn, reg;
453f1a8c 1905 sloc_t loc;
c6a6cdaa 1906 lra_insn_recog_data_t id;
1907 struct lra_static_insn_data *static_id;
1908
453f1a8c 1909 scratches = VEC_alloc (sloc_t, heap, get_max_uid ());
c6a6cdaa 1910 bitmap_initialize (&scratch_bitmap, &reg_obstack);
1911 bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
1912 FOR_EACH_BB (bb)
1913 FOR_BB_INSNS (bb, insn)
1914 if (INSN_P (insn))
1915 {
1916 id = lra_get_insn_recog_data (insn);
1917 static_id = id->insn_static_data;
1918 insn_changed_p = false;
1919 for (i = 0; i < static_id->n_operands; i++)
1920 if (GET_CODE (*id->operand_loc[i]) == SCRATCH
1921 && GET_MODE (*id->operand_loc[i]) != VOIDmode)
1922 {
1923 insn_changed_p = true;
1924 *id->operand_loc[i] = reg
1925 = lra_create_new_reg (static_id->operand[i].mode,
1926 *id->operand_loc[i], ALL_REGS, NULL);
1927 add_reg_note (insn, REG_UNUSED, reg);
1928 lra_update_dup (id, i);
453f1a8c 1929 loc = XNEW (struct sloc);
c6a6cdaa 1930 loc->insn = insn;
1931 loc->nop = i;
453f1a8c 1932 VEC_safe_push (sloc_t, heap, scratches, loc);
c6a6cdaa 1933 bitmap_set_bit (&scratch_bitmap, REGNO (*id->operand_loc[i]));
1934 bitmap_set_bit (&scratch_operand_bitmap,
1935 INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
1936 if (lra_dump_file != NULL)
1937 fprintf (lra_dump_file,
1938 "Removing SCRATCH in insn #%u (nop %d)\n",
1939 INSN_UID (insn), i);
1940 }
1941 if (insn_changed_p)
 1942 /* Because we might use DF right after the caller-saves
 1943 sub-pass, we need to keep the DF info up to date. */
1944 df_insn_rescan (insn);
1945 }
1946}
1947
1948/* Change the pseudos created by remove_scratches back into scratches. */
1949static void
1950restore_scratches (void)
1951{
1952 int i, regno;
453f1a8c 1953 sloc_t loc;
c6a6cdaa 1954 rtx last = NULL_RTX;
1955 lra_insn_recog_data_t id = NULL;
1956
453f1a8c 1957 for (i = 0; VEC_iterate (sloc_t, scratches, i, loc); i++)
c6a6cdaa 1958 {
1959 if (last != loc->insn)
1960 {
1961 last = loc->insn;
1962 id = lra_get_insn_recog_data (last);
1963 }
1964 if (REG_P (*id->operand_loc[loc->nop])
1965 && ((regno = REGNO (*id->operand_loc[loc->nop]))
1966 >= FIRST_PSEUDO_REGISTER)
1967 && lra_get_regno_hard_regno (regno) < 0)
1968 {
 1969 /* This should happen only when a scratch operand whose chosen
 1970 alternative used constraint 'X' got neither memory nor a hard register. */
1971 lra_assert (lra_former_scratch_p (regno));
1972 *id->operand_loc[loc->nop]
1973 = gen_rtx_SCRATCH (GET_MODE (*id->operand_loc[loc->nop]));
1974 lra_update_dup (id, loc->nop);
1975 if (lra_dump_file != NULL)
1976 fprintf (lra_dump_file, "Restoring SCRATCH in insn #%u(nop %d)\n",
1977 INSN_UID (loc->insn), loc->nop);
1978 }
1979 }
453f1a8c 1980 for (i = 0; VEC_iterate (sloc_t, scratches, i, loc); i++)
c6a6cdaa 1981 free (loc);
453f1a8c 1982 VEC_free (sloc_t, heap, scratches);
c6a6cdaa 1983 bitmap_clear (&scratch_bitmap);
1984 bitmap_clear (&scratch_operand_bitmap);
1985}
1986
1987\f
1988
1989#ifdef ENABLE_CHECKING
1990
1991/* Check the RTL for correctness. If FINAL_P is true, the check is
 1992 done at the end of LRA and is more rigorous. */
1993static void
1994check_rtl (bool final_p)
1995{
1996 int i;
1997 basic_block bb;
1998 rtx insn;
1999 lra_insn_recog_data_t id;
2000
2001 lra_assert (! final_p || reload_completed);
2002 FOR_EACH_BB (bb)
2003 FOR_BB_INSNS (bb, insn)
2004 if (NONDEBUG_INSN_P (insn)
2005 && GET_CODE (PATTERN (insn)) != USE
2006 && GET_CODE (PATTERN (insn)) != CLOBBER
2007 && GET_CODE (PATTERN (insn)) != ADDR_VEC
2008 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
2009 && GET_CODE (PATTERN (insn)) != ASM_INPUT)
2010 {
2011 if (final_p)
2012 {
2013 extract_insn (insn);
2014 lra_assert (constrain_operands (1));
2015 continue;
2016 }
2017 if (insn_invalid_p (insn, false))
2018 fatal_insn_not_found (insn);
2019 if (asm_noperands (PATTERN (insn)) >= 0)
2020 continue;
2021 id = lra_get_insn_recog_data (insn);
 2022 /* The code below is based on the assumption that all addresses
 2023 in a regular instruction are legitimate before LRA. The code in
 2024 lra-constraints.c is based on the assumption that there is no
 2025 subreg of memory as an insn operand. */
2026 for (i = 0; i < id->insn_static_data->n_operands; i++)
2027 {
2028 rtx op = *id->operand_loc[i];
1a8f8886 2029
c6a6cdaa 2030 if (MEM_P (op)
2031 && (GET_MODE (op) != BLKmode
2032 || GET_CODE (XEXP (op, 0)) != SCRATCH)
2033 && ! memory_address_p (GET_MODE (op), XEXP (op, 0))
 2034 /* Some ports don't recognize the following addresses
 2035 as legitimate. They are legitimate if they satisfy
 2036 the constraints and will be checked by the insn
 2037 constraints, which we ignore here. */
2038 && GET_CODE (XEXP (op, 0)) != UNSPEC
2039 && GET_RTX_CLASS (GET_CODE (XEXP (op, 0))) != RTX_AUTOINC)
2040 fatal_insn_not_found (insn);
2041 }
2042 }
2043}
2044#endif /* #ifdef ENABLE_CHECKING */
2045
2046/* Determine if the current function has an exception receiver block
 2047 that reaches the exit block via non-exceptional edges. */
2048static bool
2049has_nonexceptional_receiver (void)
2050{
2051 edge e;
2052 edge_iterator ei;
2053 basic_block *tos, *worklist, bb;
2054
2055 /* If we're not optimizing, then just err on the safe side. */
2056 if (!optimize)
2057 return true;
1a8f8886 2058
c6a6cdaa 2059 /* First determine which blocks can reach exit via normal paths. */
2060 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
2061
2062 FOR_EACH_BB (bb)
2063 bb->flags &= ~BB_REACHABLE;
2064
2065 /* Place the exit block on our worklist. */
2066 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
2067 *tos++ = EXIT_BLOCK_PTR;
1a8f8886 2068
c6a6cdaa 2069 /* Iterate: find everything reachable from what we've already seen. */
2070 while (tos != worklist)
2071 {
2072 bb = *--tos;
2073
2074 FOR_EACH_EDGE (e, ei, bb->preds)
2075 if (e->flags & EDGE_ABNORMAL)
2076 {
2077 free (worklist);
2078 return true;
2079 }
2080 else
2081 {
2082 basic_block src = e->src;
2083
2084 if (!(src->flags & BB_REACHABLE))
2085 {
2086 src->flags |= BB_REACHABLE;
2087 *tos++ = src;
2088 }
2089 }
2090 }
2091 free (worklist);
 2092 /* No exception receiver block reaches the exit block via non-exceptional edges. */
2093 return false;
2094}
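/* Illustrative example (block numbers made up): if BB3 is reached only
   through an abnormal edge -- say it is a setjmp receiver or an EH
   landing pad -- and BB3 falls through to the return block, the
   backward walk from the exit block marks BB3 as reaching exit
   normally and then sees its abnormal incoming edge, so the function
   returns true and the caller forces all call-saved registers to be
   saved.  */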
2095
2096#ifdef AUTO_INC_DEC
2097
2098/* Recursively process X of INSN and add REG_INC notes where necessary. */
2099static void
2100add_auto_inc_notes (rtx insn, rtx x)
2101{
2102 enum rtx_code code = GET_CODE (x);
2103 const char *fmt;
2104 int i, j;
2105
2106 if (code == MEM && auto_inc_p (XEXP (x, 0)))
2107 {
2108 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
2109 return;
2110 }
2111
2112 /* Scan all X sub-expressions. */
2113 fmt = GET_RTX_FORMAT (code);
2114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2115 {
2116 if (fmt[i] == 'e')
2117 add_auto_inc_notes (insn, XEXP (x, i));
2118 else if (fmt[i] == 'E')
2119 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2120 add_auto_inc_notes (insn, XVECEXP (x, i, j));
2121 }
2122}
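/* For example (register number made up): scanning a pattern that
   contains (mem:SI (post_inc:SI (reg:SI 3))) adds the note
   (expr_list:REG_INC (reg:SI 3)) to INSN, recording that register 3
   is auto-incremented by the insn.  */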
2123
2124#endif
2125
2126/* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
 2127 We replace pseudos with hard registers without notifying DF,
 2128 which can make those notes obsolete. The DF infrastructure does
 2129 not deal with REG_INC notes -- so we regenerate them here. */
2130static void
2131update_inc_notes (void)
2132{
2133 rtx *pnote;
2134 basic_block bb;
2135 rtx insn;
2136
2137 FOR_EACH_BB (bb)
2138 FOR_BB_INSNS (bb, insn)
2139 if (NONDEBUG_INSN_P (insn))
2140 {
2141 pnote = &REG_NOTES (insn);
2142 while (*pnote != 0)
2143 {
 2144 if (REG_NOTE_KIND (*pnote) == REG_DEAD || REG_NOTE_KIND (*pnote) == REG_UNUSED || REG_NOTE_KIND (*pnote) == REG_INC)
2145 *pnote = XEXP (*pnote, 1);
2146 else
2147 pnote = &XEXP (*pnote, 1);
2148 }
2149#ifdef AUTO_INC_DEC
2150 add_auto_inc_notes (insn, PATTERN (insn));
2151#endif
2152 }
2153}
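/* For example (numbers made up): if the pre-LRA pattern contained
   (post_inc:SI (reg:SI 100)) with a note (expr_list:REG_INC (reg:SI 100))
   and LRA assigned hard register 3 to pseudo 100, the pattern now
   refers to hard register 3 while the note still names the pseudo;
   the stale note is dropped here and rebuilt from the pattern.  */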
2154
2155/* Set to 1 while in lra. */
2156int lra_in_progress;
2157
1a8f8886 2158/* Start of reload pseudo regnos before the new spill pass. */
c6a6cdaa 2159int lra_constraint_new_regno_start;
2160
1a8f8886 2161/* Inheritance pseudo regnos before the new spill pass. */
c6a6cdaa 2162bitmap_head lra_inheritance_pseudos;
2163
1a8f8886 2164/* Split regnos before the new spill pass. */
c6a6cdaa 2165bitmap_head lra_split_regs;
2166
2167/* Reload pseudo regnos before the new assignment pass which still
1a8f8886 2168 can be spilled after the assignment pass. */
c6a6cdaa 2169bitmap_head lra_optional_reload_pseudos;
2170
2171/* First UID of insns generated before a new spill pass. */
2172int lra_constraint_new_insn_uid_start;
2173
2174/* File used for output of LRA debug information. */
2175FILE *lra_dump_file;
2176
2177/* True if we should try to spill into registers of different
 2178 classes instead of memory. */
2179bool lra_reg_spill_p;
2180
2181/* Set up value LRA_REG_SPILL_P. */
2182static void
2183setup_reg_spill_flag (void)
2184{
2185 int cl, mode;
2186
2187 if (targetm.spill_class != NULL)
2188 for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2189 for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2190 if (targetm.spill_class ((enum reg_class) cl,
2191 (enum machine_mode) mode) != NO_REGS)
2192 {
2193 lra_reg_spill_p = true;
2194 return;
2195 }
2196 lra_reg_spill_p = false;
2197}
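/* For illustration only -- a hypothetical target hook implementation
   (the register class FP_SPILL_REGS below is made up):

       static reg_class_t
       some_target_spill_class (reg_class_t rclass, enum machine_mode mode)
       {
         if (rclass == GENERAL_REGS && mode == SImode)
           return FP_SPILL_REGS;
         return NO_REGS;
       }

   A target providing such a hook makes lra_reg_spill_p true, so LRA
   will consider spilling SImode general-register pseudos into that
   class instead of memory.  */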
2198
2199/* True if the current function is too big to use the regular LRA
 2200 algorithms; in other words, we should use the simpler and faster
 2201 algorithms in LRA. It also means we should not worry about
 2202 generating code for caller saves. The value is set up in IRA. */
2203bool lra_simple_p;
2204
2205/* Major LRA entry function. F is the file that should be used to
 2206 dump LRA debug info. */
2207void
2208lra (FILE *f)
2209{
2210 int i;
2211 bool live_p, scratch_p, inserted_p;
2212
2213 lra_dump_file = f;
2214
2215 timevar_push (TV_LRA);
2216
2217 init_insn_recog_data ();
2218
2219#ifdef ENABLE_CHECKING
2220 check_rtl (false);
2221#endif
2222
2223 COPY_HARD_REG_SET (lra_no_alloc_regs, ira_no_alloc_regs);
2224
2225 lra_live_range_iter = lra_coalesce_iter = 0;
2226 lra_constraint_iter = lra_constraint_iter_after_spill = 0;
2227 lra_inheritance_iter = lra_undo_inheritance_iter = 0;
2228
2229 setup_reg_spill_flag ();
2230
 2231 /* We cannot set reload_in_progress because it prevents the
 2232 creation of new pseudos. */
2233 lra_in_progress = 1;
2234
2235 init_reg_info ();
2236 expand_reg_info ();
2237
 2238 /* remove_scratches can create new pseudos for clobbers -- so set
 2239 up lra_constraint_new_regno_start before calling it, to permit
 2240 changing the reg classes of the pseudos created by this
 2241 simplification. */
2242 lra_constraint_new_regno_start = max_reg_num ();
2243 remove_scratches ();
2244 scratch_p = lra_constraint_new_regno_start != max_reg_num ();
2245
2246 /* A function that has a non-local label that can reach the exit
2247 block via non-exceptional paths must save all call-saved
2248 registers. */
2249 if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
2250 crtl->saves_all_registers = 1;
2251
2252 if (crtl->saves_all_registers)
2253 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2254 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
2255 df_set_regs_ever_live (i, true);
2256
 2257 /* From now on we avoid using DF because it is too expensive
 2258 when a lot of RTL changes are made. */
2259 df_set_flags (DF_NO_INSN_RESCAN);
2260 lra_constraint_insn_stack = VEC_alloc (rtx, heap, get_max_uid ());
2261 lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
53c5d9d4 2262 bitmap_clear (lra_constraint_insn_stack_bitmap);
c6a6cdaa 2263 lra_live_ranges_init ();
2264 lra_constraints_init ();
2265 lra_curr_reload_num = 0;
2266 push_insns (get_last_insn (), NULL_RTX);
2267 /* It is needed for the 1st coalescing. */
2268 lra_constraint_new_insn_uid_start = get_max_uid ();
2269 bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
2270 bitmap_initialize (&lra_split_regs, &reg_obstack);
2271 bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
2272 live_p = false;
2273 for (;;)
2274 {
2275 for (;;)
2276 {
2277 bitmap_clear (&lra_optional_reload_pseudos);
2278 /* We should try to assign hard registers to scratches even
2279 if there were no RTL transformations in
2280 lra_constraints. */
2281 if (! lra_constraints (lra_constraint_iter == 0)
2282 && (lra_constraint_iter > 1
2283 || (! scratch_p && ! caller_save_needed)))
2284 break;
 2285 /* Constraint transformations may make eliminable hard
 2286 regs uneliminable, in which case the pseudos that use them
 2287 must be spilled. It is better to do this before pseudo
 2288 assignment.
 2289
 2290 For example, rs6000 can make
 2291 RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
 2292 to use a constant pool. */
2293 lra_eliminate (false);
2294 /* Do inheritance only for regular algorithms. */
2295 if (! lra_simple_p)
2296 lra_inheritance ();
2297 /* We need live ranges for lra_assign -- so build them. */
2298 lra_create_live_ranges (true);
2299 live_p = true;
 2300 /* If we don't spill non-reload and non-inheritance pseudos,
 2301 there is no point in running memory-memory move coalescing.
 2302 If inheritance pseudos were spilled, the memory-memory
 2303 moves involving them will be removed by the pass undoing
 2304 inheritance. */
2305 if (lra_simple_p)
2306 lra_assign ();
2307 else
2308 {
2309 /* Do coalescing only for regular algorithms. */
1a8f8886 2310 if (! lra_assign () && lra_coalesce ())
c6a6cdaa 2311 live_p = false;
2312 if (lra_undo_inheritance ())
2313 live_p = false;
2314 }
2315 }
2316 bitmap_clear (&lra_inheritance_pseudos);
2317 bitmap_clear (&lra_split_regs);
2318 if (! lra_need_for_spills_p ())
2319 break;
2320 if (! live_p)
2321 {
2322 /* We need full live info for spilling pseudos into
2323 registers instead of memory. */
2324 lra_create_live_ranges (lra_reg_spill_p);
2325 live_p = true;
2326 }
2327 lra_spill ();
 2328 /* Assigning stack slots changes the elimination offsets for
 2329 some eliminations, so update the offsets here. */
2330 lra_eliminate (false);
2331 lra_constraint_new_regno_start = max_reg_num ();
2332 lra_constraint_new_insn_uid_start = get_max_uid ();
2333 lra_constraint_iter_after_spill = 0;
2334 }
2335 restore_scratches ();
2336 lra_eliminate (true);
ae72d5b2 2337 lra_final_code_change ();
c6a6cdaa 2338 lra_in_progress = 0;
2339 lra_clear_live_ranges ();
2340 lra_live_ranges_finish ();
2341 lra_constraints_finish ();
2342 finish_reg_info ();
2343 sbitmap_free (lra_constraint_insn_stack_bitmap);
2344 VEC_free (rtx, heap, lra_constraint_insn_stack);
2345 finish_insn_recog_data ();
2346 regstat_free_n_sets_and_refs ();
2347 regstat_free_ri ();
2348 reload_completed = 1;
2349 update_inc_notes ();
2350
2351 inserted_p = fixup_abnormal_edges ();
2352
 2353 /* We've possibly turned a single trapping insn into multiple ones. */
2354 if (cfun->can_throw_non_call_exceptions)
2355 {
2356 sbitmap blocks;
2357 blocks = sbitmap_alloc (last_basic_block);
53c5d9d4 2358 bitmap_ones (blocks);
c6a6cdaa 2359 find_many_sub_basic_blocks (blocks);
2360 sbitmap_free (blocks);
2361 }
2362
2363 if (inserted_p)
2364 commit_edge_insertions ();
2365
2366 /* Replacing pseudos with their memory equivalents might have
2367 created shared rtx. Subsequent passes would get confused
2368 by this, so unshare everything here. */
2369 unshare_all_rtl_again (get_insns ());
2370
2371#ifdef ENABLE_CHECKING
2372 check_rtl (true);
2373#endif
2374
2375 timevar_pop (TV_LRA);
2376}
2377
2378/* Called once per compiler run to initialize LRA data. */
2379void
2380lra_init_once (void)
2381{
2382 init_insn_code_data_once ();
2383}
2384
2385/* Initialize LRA whenever register-related information is changed. */
2386void
2387lra_init (void)
2388{
2389 init_op_alt_data ();
2390}
2391
2392/* Called once per compiler run to finish the LRA data that are
 2393 initialized once. */
2394void
2395lra_finish_once (void)
2396{
2397 finish_insn_code_data_once ();
2398}