]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/lra-constraints.c
PR c++/89705 - ICE with reference binding with conversion function.
[thirdparty/gcc.git] / gcc / lra-constraints.c
CommitLineData
c6a6cdaa 1/* Code for RTL transformations to satisfy insn constraints.
fbd26352 2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
c6a6cdaa 3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22/* This file contains code for 3 passes: constraint pass,
23 inheritance/split pass, and pass for undoing failed inheritance and
24 split.
25
26 The major goal of constraint pass is to transform RTL to satisfy
27 insn and address constraints by:
28 o choosing insn alternatives;
29 o generating *reload insns* (or reloads in brief) and *reload
30 pseudos* which will get necessary hard registers later;
31 o substituting pseudos with equivalent values and removing the
32 instructions that initialized those pseudos.
33
34 The constraint pass has biggest and most complicated code in LRA.
35 There are a lot of important details like:
36 o reuse of input reload pseudos to simplify reload pseudo
37 allocations;
38 o some heuristics to choose insn alternative to improve the
39 inheritance;
40 o early clobbers etc.
41
42 The pass is mimicking former reload pass in alternative choosing
43 because the reload pass is oriented to current machine description
44 model. It might be changed if the machine description model is
45 changed.
46
47 There is special code for preventing all LRA and this pass cycling
48 in case of bugs.
49
50 On the first iteration of the pass we process every instruction and
51 choose an alternative for each one. On subsequent iterations we try
52 to avoid reprocessing instructions if we can be sure that the old
53 choice is still valid.
54
 55 The inheritance/split pass is to transform code to achieve
 56 inheritance and live range splitting.  It is done on backward
 57 traversal of EBBs.
58
59 The inheritance optimization goal is to reuse values in hard
60 registers. There is analogous optimization in old reload pass. The
61 inheritance is achieved by following transformation:
62
63 reload_p1 <- p reload_p1 <- p
64 ... new_p <- reload_p1
65 ... => ...
66 reload_p2 <- p reload_p2 <- new_p
67
68 where p is spilled and not changed between the insns. Reload_p1 is
69 also called *original pseudo* and new_p is called *inheritance
70 pseudo*.
71
72 The subsequent assignment pass will try to assign the same (or
73 another if it is not possible) hard register to new_p as to
74 reload_p1 or reload_p2.
75
76 If the assignment pass fails to assign a hard register to new_p,
77 this file will undo the inheritance and restore the original code.
78 This is because implementing the above sequence with a spilled
79 new_p would make the code much worse. The inheritance is done in
80 EBB scope. The above is just a simplified example to get an idea
81 of the inheritance as the inheritance is also done for non-reload
82 insns.
83
84 Splitting (transformation) is also done in EBB scope on the same
85 pass as the inheritance:
86
87 r <- ... or ... <- r r <- ... or ... <- r
88 ... s <- r (new insn -- save)
1a8f8886 89 ... =>
c6a6cdaa 90 ... r <- s (new insn -- restore)
91 ... <- r ... <- r
92
93 The *split pseudo* s is assigned to the hard register of the
94 original pseudo or hard register r.
95
96 Splitting is done:
97 o In EBBs with high register pressure for global pseudos (living
98 in at least 2 BBs) and assigned to hard registers when there
 99 are more than one reload needing the hard registers;
100 o for pseudos needing save/restore code around calls.
101
102 If the split pseudo still has the same hard register as the
103 original pseudo after the subsequent assignment pass or the
104 original pseudo was split, the opposite transformation is done on
105 the same pass for undoing inheritance. */
106
107#undef REG_OK_STRICT
108
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
9ef16211 112#include "backend.h"
7c29e30e 113#include "target.h"
c6a6cdaa 114#include "rtl.h"
7c29e30e 115#include "tree.h"
116#include "predict.h"
9ef16211 117#include "df.h"
ad7b10a2 118#include "memmodel.h"
c6a6cdaa 119#include "tm_p.h"
7c29e30e 120#include "expmed.h"
121#include "optabs.h"
c6a6cdaa 122#include "regs.h"
7c29e30e 123#include "ira.h"
c6a6cdaa 124#include "recog.h"
125#include "output.h"
126#include "addresses.h"
c6a6cdaa 127#include "expr.h"
94ea8568 128#include "cfgrtl.h"
c6a6cdaa 129#include "rtl-error.h"
4b69081d 130#include "params.h"
9ef16211 131#include "lra.h"
c6a6cdaa 132#include "lra-int.h"
397881d3 133#include "print-rtl.h"
c6a6cdaa 134
135/* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
136 insn. Remember that LRA_CURR_RELOAD_NUM is the number of emitted
137 reload insns. */
138static int bb_reload_num;
139
ea99c7a1 140/* The current insn being processed and corresponding its single set
141 (NULL otherwise), its data (basic block, the insn data, the insn
142 static data, and the mode of each operand). */
7f836b57 143static rtx_insn *curr_insn;
ea99c7a1 144static rtx curr_insn_set;
c6a6cdaa 145static basic_block curr_bb;
146static lra_insn_recog_data_t curr_id;
147static struct lra_static_insn_data *curr_static_id;
3754d046 148static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
1aae95ec 149/* Mode of the register substituted by its equivalence with VOIDmode
150 (e.g. constant) and whose subreg is given operand of the current
151 insn. VOIDmode in all other cases. */
152static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
c6a6cdaa 153
154\f
155
156/* Start numbers for new registers and insns at the current constraints
157 pass start. */
158static int new_regno_start;
159static int new_insn_uid_start;
160
1efe9e9d 161/* If LOC is nonnull, strip any outer subreg from it. */
162static inline rtx *
163strip_subreg (rtx *loc)
164{
165 return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
166}
167
c6a6cdaa 168/* Return hard regno of REGNO or if it is was not assigned to a hard
169 register, use a hard register from its allocno class. */
170static int
171get_try_hard_regno (int regno)
172{
173 int hard_regno;
174 enum reg_class rclass;
175
176 if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
177 hard_regno = lra_get_regno_hard_regno (regno);
178 if (hard_regno >= 0)
179 return hard_regno;
180 rclass = lra_get_allocno_class (regno);
181 if (rclass == NO_REGS)
182 return -1;
183 return ira_class_hard_regs[rclass][0];
184}
185
9731eaaf 186/* Return the hard regno of X after removing its subreg. If X is not
187 a register or a subreg of a register, return -1. If X is a pseudo,
331a9ecc 188 use its assignment. If FINAL_P return the final hard regno which will
189 be after elimination. */
c6a6cdaa 190static int
331a9ecc 191get_hard_regno (rtx x, bool final_p)
c6a6cdaa 192{
193 rtx reg;
331a9ecc 194 int hard_regno;
c6a6cdaa 195
196 reg = x;
9731eaaf 197 if (SUBREG_P (x))
c6a6cdaa 198 reg = SUBREG_REG (x);
199 if (! REG_P (reg))
200 return -1;
9731eaaf 201 if (! HARD_REGISTER_NUM_P (hard_regno = REGNO (reg)))
c6a6cdaa 202 hard_regno = lra_get_regno_hard_regno (hard_regno);
203 if (hard_regno < 0)
204 return -1;
331a9ecc 205 if (final_p)
206 hard_regno = lra_get_elimination_hard_regno (hard_regno);
9731eaaf 207 if (SUBREG_P (x))
331a9ecc 208 hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
209 SUBREG_BYTE (x), GET_MODE (x));
210 return hard_regno;
c6a6cdaa 211}
212
213/* If REGNO is a hard register or has been allocated a hard register,
214 return the class of that register. If REGNO is a reload pseudo
215 created by the current constraints pass, return its allocno class.
216 Return NO_REGS otherwise. */
217static enum reg_class
218get_reg_class (int regno)
219{
220 int hard_regno;
221
331a9ecc 222 if (! HARD_REGISTER_NUM_P (hard_regno = regno))
c6a6cdaa 223 hard_regno = lra_get_regno_hard_regno (regno);
224 if (hard_regno >= 0)
225 {
331a9ecc 226 hard_regno = lra_get_elimination_hard_regno (hard_regno);
c6a6cdaa 227 return REGNO_REG_CLASS (hard_regno);
228 }
229 if (regno >= new_regno_start)
230 return lra_get_allocno_class (regno);
231 return NO_REGS;
232}
233
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: test class membership after elimination.  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (INSN_UID (curr_insn) >= new_insn_uid_start
	  && curr_insn_set != NULL
	  && ((OBJECT_P (SET_SRC (curr_insn_set))
	       && ! CONSTANT_P (SET_SRC (curr_insn_set)))
	      || (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
		  && OBJECT_P (SUBREG_REG (SET_SRC (curr_insn_set)))
		  && ! CONSTANT_P (SUBREG_REG (SET_SRC (curr_insn_set)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Reload pseudo of this pass: try to narrow its class to the
	 intersection with CL.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs (hard_regno, reg_mode);
	  if (nregs == 1)
	    return true;
	  /* A multi-register value must fit entirely in allocatable
	     registers of the class.  */
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
308
309/* Return true if REGNO satisfies a memory constraint. */
310static bool
311in_mem_p (int regno)
312{
313 return get_reg_class (regno) == NO_REGS;
314}
315
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.  */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style target macro: it only supports the generic address
     space and reports success by branching to the WIN label.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Third argument 0 requests the non-strict check.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
334
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object).  The constructor rewrites the base/index terms in
     place; the destructor restores the original registers.  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    /* The address being processed.  */
    struct address_info *m_ad;
    /* Location of the base term with any outer subreg stripped, or
       NULL if the address has no base.  */
    rtx *m_base_loc;
    /* Original *M_BASE_LOC, restored by the destructor.  */
    rtx m_base_reg;
    /* Likewise for the index term.  */
    rtx *m_index_loc;
    rtx m_index_reg;
  };
}
351
/* Apply register elimination to the base and index terms of AD in
   place, remembering the original values for the destructor.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      /* If we have non-legitimate address which is decomposed not in
	 the way we expected, don't do elimination here.  In such case
	 the address will be reloaded and elimination will be done in
	 reload insn finally.  */
      if (REG_P (m_base_reg))
	lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep the duplicate base term (if any) in sync.  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      if (REG_P (m_index_reg))
	lra_eliminate_reg_if_possible (m_index_loc);
    }
}
378
379address_eliminator::~address_eliminator ()
380{
381 if (m_base_loc && *m_base_loc != m_base_reg)
382 {
383 *m_base_loc = m_base_reg;
384 if (m_ad->base_term2 != NULL)
385 *m_ad->base_term2 = *m_ad->base_term;
386 }
387 if (m_index_loc && *m_index_loc != m_index_reg)
388 *m_index_loc = m_index_reg;
389}
390
391/* Return true if the eliminated form of AD is a legitimate target address. */
392static bool
393valid_address_p (struct address_info *ad)
394{
395 address_eliminator eliminator (ad);
396 return valid_address_p (ad->mode, *ad->outer, ad->as);
397}
398
67e22af9 399/* Return true if the eliminated form of memory reference OP satisfies
6b3b345a 400 extra (special) memory constraint CONSTRAINT. */
67e22af9 401static bool
79bc09fb 402satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
67e22af9 403{
404 struct address_info ad;
405
406 decompose_mem_address (&ad, op);
407 address_eliminator eliminator (&ad);
79bc09fb 408 return constraint_satisfied_p (op, constraint);
67e22af9 409}
410
411/* Return true if the eliminated form of address AD satisfies extra
412 address constraint CONSTRAINT. */
413static bool
414satisfies_address_constraint_p (struct address_info *ad,
79bc09fb 415 enum constraint_num constraint)
67e22af9 416{
417 address_eliminator eliminator (ad);
79bc09fb 418 return constraint_satisfied_p (*ad->outer, constraint);
67e22af9 419}
420
421/* Return true if the eliminated form of address OP satisfies extra
422 address constraint CONSTRAINT. */
423static bool
79bc09fb 424satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
67e22af9 425{
426 struct address_info ad;
427
428 decompose_lea_address (&ad, &op);
429 return satisfies_address_constraint_p (&ad, constraint);
430}
67e22af9 431
61cd3e57 432/* Initiate equivalences for LRA. As we keep original equivalences
433 before any elimination, we need to make copies otherwise any change
434 in insns might change the equivalences. */
435void
436lra_init_equiv (void)
437{
438 ira_expand_reg_equiv ();
439 for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
440 {
441 rtx res;
442
443 if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
444 ira_reg_equiv[i].memory = copy_rtx (res);
445 if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
446 ira_reg_equiv[i].invariant = copy_rtx (res);
447 }
448}
449
450static rtx loc_equivalence_callback (rtx, const_rtx, void *);
451
452/* Update equivalence for REGNO. We need to this as the equivalence
453 might contain other pseudos which are changed by their
454 equivalences. */
455static void
456update_equiv (int regno)
457{
458 rtx x;
459
460 if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
461 ira_reg_equiv[regno].memory
462 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
463 NULL_RTX);
464 if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
465 ira_reg_equiv[regno].invariant
466 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
467 NULL_RTX);
468}
469
/* If we have decided to substitute X with another value, return that
   value, otherwise return X.  */
static rtx
get_equiv (rtx x)
{
  int regno;
  rtx res;

  /* Only pseudos with a defined, profitable equivalence and no
     assigned hard register are substituted.  */
  if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
      || ! ira_reg_equiv[regno].defined_p
      || ! ira_reg_equiv[regno].profitable_p
      || lra_get_regno_hard_regno (regno) >= 0)
    return x;
  if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
    {
      /* The target may veto substituting this memory equivalence.  */
      if (targetm.cannot_substitute_mem_equiv_p (res))
	return x;
      return res;
    }
  if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
    return res;
  if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
    return res;
  /* defined_p guarantees one of the three equivalence kinds exists.  */
  gcc_unreachable ();
}
495
3b3a5e5f 496/* If we have decided to substitute X with the equivalent value,
497 return that value after elimination for INSN, otherwise return
498 X. */
499static rtx
7f836b57 500get_equiv_with_elimination (rtx x, rtx_insn *insn)
3b3a5e5f 501{
502 rtx res = get_equiv (x);
503
504 if (x == res || CONSTANT_P (res))
505 return res;
497ba60f 506 return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
99535fab 507 false, false, 0, true);
3b3a5e5f 508}
509
c6a6cdaa 510/* Set up curr_operand_mode. */
511static void
512init_curr_operand_mode (void)
513{
514 int nop = curr_static_id->n_operands;
515 for (int i = 0; i < nop; i++)
516 {
3754d046 517 machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
c6a6cdaa 518 if (mode == VOIDmode)
519 {
520 /* The .md mode for address operands is the mode of the
521 addressed value rather than the mode of the address itself. */
522 if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
523 mode = Pmode;
524 else
525 mode = curr_static_id->operand[i].mode;
526 }
527 curr_operand_mode[i] = mode;
528 }
529}
530
531\f
532
533/* The page contains code to reuse input reloads. */
534
/* Structure describing one input reload of the current insn.  */
struct input_reload
{
  /* True for input reload of matched operands.  */
  bool match_p;
  /* Reloaded value (the original operand rtx).  */
  rtx input;
  /* Reload pseudo used to hold INPUT.  */
  rtx reg;
};
545
/* The number of elements in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.  */
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];

/* Initiate data concerning reuse of input reloads for the current
   insn.  Must be called before reloads are generated for an insn.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
559
/* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse an already
   created input reload pseudo (only if TYPE is not OP_OUT).  Don't
   reuse a pseudo if IN_SUBREG_P is true and the reused pseudo should be
   wrapped up in a SUBREG.  The result pseudo is returned through
   RESULT_REG.  Return TRUE if we created a new pseudo, FALSE if we
   reused an already created input reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, bool in_subreg_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;
  bool unique_p = false;

  /* Output reloads always get a brand new pseudo.  */
  if (type == OP_OUT)
    {
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      {
	if (! curr_insn_input_reloads[i].match_p
	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	  {
	    rtx reg = curr_insn_input_reloads[i].reg;
	    regno = REGNO (reg);
	    /* If input is equal to original and both are VOIDmode,
	       GET_MODE (reg) might be still different from mode.
	       Ensure we don't return *result_reg with wrong mode.  */
	    if (GET_MODE (reg) != mode)
	      {
		if (in_subreg_p)
		  continue;
		/* Never reuse a narrower pseudo for a wider mode.  */
		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
			      GET_MODE_SIZE (mode)))
		  continue;
		reg = lowpart_subreg (mode, reg, GET_MODE (reg));
		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		  continue;
	      }
	    *result_reg = reg;
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file, " Reuse r%d for reload ", regno);
		dump_value_slim (lra_dump_file, original, 1);
	      }
	    /* Narrow the reused pseudo's class if the match required
	       it.  */
	    if (new_class != lra_get_allocno_class (regno))
	      lra_change_class (regno, new_class, ", change to", false);
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "\n");
	    return false;
	  }
	/* If we have an input reload with a different mode, make sure it
	   will get a different hard reg.  */
	else if (REG_P (original)
		 && REG_P (curr_insn_input_reloads[i].input)
		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
		 && (GET_MODE (original)
		     != GET_MODE (curr_insn_input_reloads[i].input)))
	  unique_p = true;
      }
  *result_reg = (unique_p
		 ? lra_create_new_reg_with_unique_value
		 : lra_create_new_reg) (mode, original, rclass, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  /* Record the new reload so later operands of this insn can reuse
     it.  */
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
637
638\f
c6a6cdaa 639/* The page contains major code to choose the current insn alternative
640 and generate reloads for it. */
641
642/* Return the offset from REGNO of the least significant register
643 in (reg:MODE REGNO).
644
645 This function is used to tell whether two registers satisfy
646 a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
647
648 REGNO1 + lra_constraint_offset (REGNO1, MODE1)
649 == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
650int
3754d046 651lra_constraint_offset (int regno, machine_mode mode)
c6a6cdaa 652{
653 lra_assert (regno < FIRST_PSEUDO_REGISTER);
8974b7a3 654
655 scalar_int_mode int_mode;
656 if (WORDS_BIG_ENDIAN
657 && is_a <scalar_int_mode> (mode, &int_mode)
658 && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
92d2aec3 659 return hard_regno_nregs (regno, mode) - 1;
c6a6cdaa 660 return 0;
661}
662
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  /* Fast path: both operands are (subregs of) registers -- compare
     their hard regnos, adjusted for word-endianness.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      i = get_hard_regno (x, false);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Slow path: structural comparison.  Pseudos match only if they are
     the same register.  */
  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants are shared, so pointer inequality above
	 already proved they differ.  */
      return false;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'p':
	  /* poly_int operands (e.g. subreg byte offsets) must be
	     provably equal.  */
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	/* It is believed that rtx's at this level will never
	   contain anything but integers and other rtx's, except for
	   within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
803
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH
   parts and variable-sized constants cannot be pooled.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))
812
/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class: either a single-register
   class, or one the target says is likely to be spilled.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
c6a6cdaa 819
820/* If REG is a reload pseudo, try to make its class satisfying CL. */
821static void
822narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
823{
824 enum reg_class rclass;
825
826 /* Do not make more accurate class from reloads generated. They are
827 mostly moves with a lot of constraints. Making more accurate
828 class may results in very narrow class and impossibility of find
829 registers for several reloads of one insn. */
830 if (INSN_UID (curr_insn) >= new_insn_uid_start)
831 return;
832 if (GET_CODE (reg) == SUBREG)
833 reg = SUBREG_REG (reg);
834 if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
835 return;
836 if (in_class_p (reg, cl, &rclass) && rclass != cl)
7619e612 837 lra_change_class (REGNO (reg), rclass, " Change to", true);
c6a6cdaa 838}
839
f64b137f 840/* Searches X for any reference to a reg with the same value as REGNO,
841 returning the rtx of the reference found if any. Otherwise,
842 returns NULL_RTX. */
843static rtx
844regno_val_use_in (unsigned int regno, rtx x)
845{
846 const char *fmt;
847 int i, j;
848 rtx tem;
849
850 if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
851 return x;
852
853 fmt = GET_RTX_FORMAT (GET_CODE (x));
854 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
855 {
856 if (fmt[i] == 'e')
857 {
858 if ((tem = regno_val_use_in (regno, XEXP (x, i))))
859 return tem;
860 }
861 else if (fmt[i] == 'E')
862 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
863 if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
864 return tem;
865 }
866
867 return NULL_RTX;
868}
869
/* Return true if all current insn non-output operands except INS (it
   has a negative end marker) do not use pseudos with the same value
   as REGNO.  */
static bool
check_conflict_input_operands (int regno, signed char *ins)
{
  int in;
  int n_operands = curr_static_id->n_operands;

  for (int nop = 0; nop < n_operands; nop++)
    if (! curr_static_id->operand[nop].is_operator
	&& curr_static_id->operand[nop].type != OP_OUT)
      {
	/* Skip operands that are themselves listed in INS.  */
	for (int i = 0; (in = ins[i]) >= 0; i++)
	  if (in == nop)
	    break;
	if (in < 0
	    && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
	  return false;
      }
  return true;
}
892
/* Generate reloads for matching OUT and INS (array of input operand
   numbers with end marker -1) with reg class GOAL_CLASS, considering
   output operands OUTS (similar array to INS) needing to be in different
   registers.  Add input and output reloads correspondingly to the lists
   *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for chosen alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, rtx_insn **before,
	      rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  /* All input reloads generated below are accumulated on the *BEFORE
     list; the sequence is closed near the end of this function.  */
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The matched operands have different modes, so the reload
	 pseudo is created in the wider mode and the narrower operand
	 is accessed through a lowpart SUBREG of it.
	 process_alt_operands has already checked that the mode sizes
	 are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Input is the wider one: reload pseudo gets INMODE and the
	     output uses its lowpart.  */
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx,
						    goal_class, "");
	  new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Output is the wider one: reload pseudo gets OUTMODE and
	     the input uses its lowpart.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class, "");
	  new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input value for the reload pseudo only in the safe
	 single-dying-input case described above; otherwise give the
	 pseudo a unique value derived from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx,
						   goal_class, ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the new reload pseudo for every matched input
     operand.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    {
      lra_assert
	(GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	 || GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]));
      *curr_id->operand_loc[in] = new_in_reg;
    }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      /* The output value is actually used, so store it back after the
	 insn.  */
      start_sequence ();
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1066
1067/* Return register class which is union of all reg classes in insn
1068 constraint alternative string starting with P. */
1069static enum reg_class
1070reg_class_from_constraints (const char *p)
1071{
1072 int c, len;
1073 enum reg_class op_class = NO_REGS;
1074
1075 do
1076 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1077 {
1078 case '#':
1079 case ',':
1080 return op_class;
1081
c6a6cdaa 1082 case 'g':
c6a6cdaa 1083 op_class = reg_class_subunion[op_class][GENERAL_REGS];
1084 break;
1a8f8886 1085
c6a6cdaa 1086 default:
79bc09fb 1087 enum constraint_num cn = lookup_constraint (p);
1088 enum reg_class cl = reg_class_for_constraint (cn);
1089 if (cl == NO_REGS)
c6a6cdaa 1090 {
79bc09fb 1091 if (insn_extra_address_constraint (cn))
c6a6cdaa 1092 op_class
1093 = (reg_class_subunion
1094 [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1095 ADDRESS, SCRATCH)]);
c6a6cdaa 1096 break;
1097 }
1a8f8886 1098
79bc09fb 1099 op_class = reg_class_subunion[op_class][cl];
1100 break;
c6a6cdaa 1101 }
1102 while ((p += len), c);
1103 return op_class;
1104}
1105
1106/* If OP is a register, return the class of the register as per
1107 get_reg_class, otherwise return NO_REGS. */
1108static inline enum reg_class
1109get_op_class (rtx op)
1110{
1111 return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1112}
1113
1114/* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1115 otherwise. If modes of MEM_PSEUDO and VAL are different, use
1116 SUBREG for VAL to make them equal. */
7f836b57 1117static rtx_insn *
c6a6cdaa 1118emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1119{
1120 if (GET_MODE (mem_pseudo) != GET_MODE (val))
ea99c7a1 1121 {
34575461 1122 /* Usually size of mem_pseudo is greater than val size but in
1123 rare cases it can be less as it can be defined by target
1124 dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
cc0dc61b 1125 if (! MEM_P (val))
1126 {
05856efc 1127 val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1128 GET_CODE (val) == SUBREG
1129 ? SUBREG_REG (val) : val);
cc0dc61b 1130 LRA_SUBREG_P (val) = 1;
1131 }
1132 else
1133 {
1134 mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1135 LRA_SUBREG_P (mem_pseudo) = 1;
1136 }
ea99c7a1 1137 }
f9a00e9e 1138 return to_p ? gen_move_insn (mem_pseudo, val)
1139 : gen_move_insn (val, mem_pseudo);
c6a6cdaa 1140}
1141
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through SUBREGs to the underlying reg/mem when classifying
     the operands.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Does the target require going through memory for this
     register-class pair?  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask the target hook first about an output reload of DEST, then
     about an input reload of SRC, and verify both answers agree.  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The target supplied an insn pattern; it needs a scratch
	 register whose class comes from its operand 2 constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The emitted sequence fully replaces the original move.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1286
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.  */
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which given operand can be matched to.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.  */
static int goal_alt_number;

/* True if the corresponding operand is the result of an equivalence
   substitution.  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following variables are used to choose the best insn
   alternative.  They reflect final characteristics of the best
   alternative.  */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloadings of the
   same value.  The distances are counted from the current BB start.
   It is used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no corresponding input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
1338
/* if CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   if CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
	 between two registers with different classes, but there normally will
	 be "mov" which transfers element of vector register into the general
	 register, and this normally will be a subreg which should be reloaded
	 as a whole.  This is particularly likely to be triggered when
	 -fno-split-wide-types specified.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
       loc = &SUBREG_REG (*loc);
    }

  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Substitute the pseudo's equivalence (after elimination) if it
	 has one; work on a copy so sharing is not broken.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* The register is of the wrong class (or was replaced by its
	     equivalence): a reload register is required.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* No reload needed -- narrowing the pseudo's class is
	     enough.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, "	   Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      /* Load the original value into the new address register before
	 the insn.  */
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* *LOC is automodified: store the (possibly modified) value
	 back after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
1441
c5334148 1442/* Insert move insn in simplify_operand_subreg. BEFORE returns
1443 the insn to be inserted before curr insn. AFTER returns the
1444 the insn to be inserted after curr insn. ORIGREG and NEWREG
1445 are the original reg and new reg for reload. */
1446static void
7f836b57 1447insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1448 rtx newreg)
c5334148 1449{
1450 if (before)
1451 {
1452 push_to_sequence (*before);
1453 lra_emit_move (newreg, origreg);
1454 *before = get_insns ();
1455 end_sequence ();
1456 }
1457 if (after)
1458 {
1459 start_sequence ();
1460 lra_emit_move (origreg, newreg);
1461 emit_insn (*after);
1462 *after = get_insns ();
1463 end_sequence ();
1464 }
1465}
1466
3754d046 1467static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
856bd6f2 1468static bool process_address (int, bool, rtx_insn **, rtx_insn **);
1a68e833 1469
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  if (MEM_P (reg))
    {
      /* Subreg of memory: try to fold the subreg into the MEM and
	 decide whether the result (or a reload of it) is usable.  */
      const bool addr_was_valid
	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
      alter_subreg (curr_id->operand_loc[nop], false);
      rtx subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      const bool addr_is_valid = valid_address_p (GET_MODE (subst),
						  XEXP (subst, 0),
						  MEM_ADDR_SPACE (subst));
      if (!addr_was_valid
	  || addr_is_valid
	  || ((get_constraint_type (lookup_constraint
				    (curr_static_id->operand[nop].constraint))
	       != CT_SPECIAL_MEMORY)
	      /* We still can reload address and if the address is
		 valid, we can remove subreg without reloading its
		 inner memory.  */
	      && valid_address_p (GET_MODE (subst),
				  regno_reg_rtx
				  [ira_class_hard_regs
				   [base_reg_class (GET_MODE (subst),
						    MEM_ADDR_SPACE (subst),
						    ADDRESS, SCRATCH)][0]],
				  MEM_ADDR_SPACE (subst))))
	{
	  /* If we change the address for a paradoxical subreg of memory, the
	     new address might violate the necessary alignment or the access
	     might be slow; take this into consideration.  We need not worry
	     about accesses beyond allocated memory for paradoxical memory
	     subregs as we don't substitute such equiv memory (see processing
	     equivalences in function lra_constraints) and because for spilled
	     pseudos we allocate stack memory enough for the biggest
	     corresponding paradoxical subreg.

	     However, do not blindly simplify a (subreg (mem ...)) for
	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
	     data into a register when the inner is narrower than outer or
	     missing important data from memory when the inner is wider than
	     outer.  This rule only applies to modes that are no wider than
	     a word.

	     If valid memory becomes invalid after subreg elimination
	     and address might be different we still have to reload
	     memory.
	  */
	  if ((! addr_was_valid
	       || addr_is_valid
	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
	      && !(maybe_ne (GET_MODE_PRECISION (mode),
			     GET_MODE_PRECISION (innermode))
		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
		   && WORD_REGISTER_OPERATIONS)
	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
		      && targetm.slow_unaligned_access (innermode,
							MEM_ALIGN (reg)))))
	    return true;

	  /* Undo the subreg folding: reload the memory instead.  */
	  *curr_id->operand_loc[nop] = operand;

	  /* But if the address was not valid, we cannot reload the MEM without
	     reloading the address first.  */
	  if (!addr_was_valid)
	    process_address (nop, false, &before, &after);

	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
	  enum reg_class rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
			      reg, rclass, TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = (type != OP_OUT
			       || partial_subreg_p (mode, innermode));
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  SUBREG_REG (operand) = new_reg;

	  /* Convert to MODE.  */
	  reg = operand;
	  rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			      rclass, TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = type != OP_OUT;
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  *curr_id->operand_loc[nop] = new_reg;
	  lra_process_new_insns (curr_insn, before, after,
				 "Inserting slow/invalid mem reload");
	  return true;
	}

      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond to the memory.  */
      *curr_id->operand_loc[nop] = operand;
      /* Do not return false here as the MEM_P (reg) will be processed
	 later in this function.  */
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* Subreg of a hard register can be simplified directly.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs (hard_regno, innermode)
	   >= hard_regno_nregs (hard_regno, mode))
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, TRUE, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || read_modify_subreg_p (operand));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg.  For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg.  For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
	other hardreg to contain the outermode operand
	(checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg.  We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data.  So we insert a TImode reload reg180 for it.
     After reload is inserted:

       (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
	  (reg:DI 107 [ __comp ])) -1
       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.

     For LRA pseudos this should normally be handled by the biggest_mode
     mechanism.  However, it's possible for new uses of an LRA pseudo
     to be introduced after we've allocated it, such as when undoing
     inheritance, and the allocated register might not then be appropriate
     for the new uses.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && (hard_regno_nregs (hard_regno, innermode)
	       < hard_regno_nregs (hard_regno, mode))
	   && (regclass = lra_get_allocno_class (REGNO (reg)))
	   && (type != OP_IN
	       || !in_hard_reg_set_p (reg_class_contents[regclass],
				      mode, hard_regno)
	       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
					   mode, hard_regno)))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			  rclass, TRUE, "paradoxical subreg", &new_reg))
	{
	  rtx subreg;
	  bool insert_before, insert_after;

	  PUT_MODE (new_reg, mode);
	  subreg = gen_lowpart_SUBREG (innermode, new_reg);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
1759
1760/* Return TRUE if X refers for a hard register from SET. */
1761static bool
1762uses_hard_regs_p (rtx x, HARD_REG_SET set)
1763{
1764 int i, j, x_hard_regno;
3754d046 1765 machine_mode mode;
c6a6cdaa 1766 const char *fmt;
1767 enum rtx_code code;
1768
1769 if (x == NULL_RTX)
1770 return false;
1771 code = GET_CODE (x);
1772 mode = GET_MODE (x);
3a7d0e9f 1773
c6a6cdaa 1774 if (code == SUBREG)
1775 {
3a7d0e9f 1776 /* For all SUBREGs we want to check whether the full multi-register
1777 overlaps the set. For normal SUBREGs this means 'get_hard_regno' of
1778 the inner register, for paradoxical SUBREGs this means the
1779 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
1780 fine. Use the wider mode for all cases. */
1781 rtx subreg = SUBREG_REG (x);
081c1d32 1782 mode = wider_subreg_mode (x);
3a7d0e9f 1783 if (mode == GET_MODE (subreg))
1784 {
1785 x = subreg;
1786 code = GET_CODE (x);
1787 }
c6a6cdaa 1788 }
1a8f8886 1789
3a7d0e9f 1790 if (REG_P (x) || SUBREG_P (x))
c6a6cdaa 1791 {
331a9ecc 1792 x_hard_regno = get_hard_regno (x, true);
c6a6cdaa 1793 return (x_hard_regno >= 0
1794 && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1795 }
1796 if (MEM_P (x))
1797 {
1efe9e9d 1798 struct address_info ad;
c6a6cdaa 1799
1efe9e9d 1800 decompose_mem_address (&ad, x);
1801 if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1802 return true;
1803 if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1804 return true;
c6a6cdaa 1805 }
1806 fmt = GET_RTX_FORMAT (code);
1807 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1808 {
1809 if (fmt[i] == 'e')
1810 {
1811 if (uses_hard_regs_p (XEXP (x, i), set))
1812 return true;
1813 }
1814 else if (fmt[i] == 'E')
1815 {
1816 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1817 if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1818 return true;
1819 }
1820 }
1821 return false;
1822}
1823
1824/* Return true if OP is a spilled pseudo. */
1825static inline bool
1826spilled_pseudo_p (rtx op)
1827{
1828 return (REG_P (op)
1829 && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1830}
1831
1832/* Return true if X is a general constant. */
1833static inline bool
1834general_constant_p (rtx x)
1835{
1836 return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1837}
1838
ea99c7a1 1839static bool
1840reg_in_class_p (rtx reg, enum reg_class cl)
1841{
1842 if (cl == NO_REGS)
1843 return get_reg_class (REGNO (reg)) == NO_REGS;
1844 return in_class_p (reg, cl, NULL);
1845}
1846
25cd984c 1847/* Return true if SET of RCLASS contains no hard regs which can be
1848 used in MODE. */
1849static bool
1850prohibited_class_reg_set_mode_p (enum reg_class rclass,
1851 HARD_REG_SET &set,
582adad1 1852 machine_mode mode)
25cd984c 1853{
1854 HARD_REG_SET temp;
1855
e6ea917c 1856 lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
25cd984c 1857 COPY_HARD_REG_SET (temp, set);
1858 AND_COMPL_HARD_REG_SET (temp, lra_no_alloc_regs);
1859 return (hard_reg_set_subset_p
1860 (temp, ira_prohibited_class_mode_regs[rclass][mode]));
1861}
1862
8afaf3bf 1863
1864/* Used to check validity info about small class input operands. It
1865 should be incremented at start of processing an insn
1866 alternative. */
1867static unsigned int curr_small_class_check = 0;
1868
1869/* Update number of used inputs of class OP_CLASS for operand NOP.
1870 Return true if we have more such class operands than the number of
1871 available regs. */
1872static bool
1873update_and_check_small_class_inputs (int nop, enum reg_class op_class)
1874{
1875 static unsigned int small_class_check[LIM_REG_CLASSES];
1876 static int small_class_input_nums[LIM_REG_CLASSES];
1877
1878 if (SMALL_REGISTER_CLASS_P (op_class)
1879 /* We are interesting in classes became small because of fixing
1880 some hard regs, e.g. by an user through GCC options. */
1881 && hard_reg_set_intersect_p (reg_class_contents[op_class],
1882 ira_no_alloc_regs)
1883 && (curr_static_id->operand[nop].type != OP_OUT
1884 || curr_static_id->operand[nop].early_clobber))
1885 {
1886 if (small_class_check[op_class] == curr_small_class_check)
1887 small_class_input_nums[op_class]++;
1888 else
1889 {
1890 small_class_check[op_class] = curr_small_class_check;
1891 small_class_input_nums[op_class] = 1;
1892 }
1893 if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
1894 return true;
1895 }
1896 return false;
1897}
1898
c6a6cdaa 1899/* Major function to choose the current insn alternative and what
1900 operands should be reloaded and how. If ONLY_ALTERNATIVE is not
1901 negative we should consider only this alternative. Return false if
f4d3c071 1902 we cannot choose the alternative or find how to reload the
c6a6cdaa 1903 operands. */
1904static bool
1905process_alt_operands (int only_alternative)
1906{
1907 bool ok_p = false;
273c330a 1908 int nop, overall, nalt;
c6a6cdaa 1909 int n_alternatives = curr_static_id->n_alternatives;
1910 int n_operands = curr_static_id->n_operands;
1911 /* LOSERS counts the operands that don't fit this alternative and
1912 would require loading. */
1913 int losers;
eb70a065 1914 int addr_losers;
c6a6cdaa 1915 /* REJECT is a count of how undesirable this alternative says it is
1916 if any reloading is required. If the alternative matches exactly
1917 then REJECT is ignored, but otherwise it gets this much counted
1918 against it in addition to the reloading needed. */
1919 int reject;
eb70a065 1920 /* This is defined by '!' or '?' alternative constraint and added to
1921 reject. But in some cases it can be ignored. */
1922 int static_reject;
ed6272f7 1923 int op_reject;
c6a6cdaa 1924 /* The number of elements in the following array. */
1925 int early_clobbered_regs_num;
1926 /* Numbers of operands which are early clobber registers. */
1927 int early_clobbered_nops[MAX_RECOG_OPERANDS];
1928 enum reg_class curr_alt[MAX_RECOG_OPERANDS];
1929 HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
1930 bool curr_alt_match_win[MAX_RECOG_OPERANDS];
1931 bool curr_alt_win[MAX_RECOG_OPERANDS];
1932 bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
1933 int curr_alt_matches[MAX_RECOG_OPERANDS];
1934 /* The number of elements in the following array. */
1935 int curr_alt_dont_inherit_ops_num;
1936 /* Numbers of operands whose reload pseudos should not be inherited. */
1937 int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1938 rtx op;
1939 /* The register when the operand is a subreg of register, otherwise the
1940 operand itself. */
1941 rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
1942 /* The register if the operand is a register or subreg of register,
1943 otherwise NULL. */
1944 rtx operand_reg[MAX_RECOG_OPERANDS];
1945 int hard_regno[MAX_RECOG_OPERANDS];
3754d046 1946 machine_mode biggest_mode[MAX_RECOG_OPERANDS];
c6a6cdaa 1947 int reload_nregs, reload_sum;
1948 bool costly_p;
1949 enum reg_class cl;
1950
1951 /* Calculate some data common for all alternatives to speed up the
1952 function. */
1953 for (nop = 0; nop < n_operands; nop++)
1954 {
0244be31 1955 rtx reg;
1956
c6a6cdaa 1957 op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
1958 /* The real hard regno of the operand after the allocation. */
331a9ecc 1959 hard_regno[nop] = get_hard_regno (op, true);
1a8f8886 1960
0244be31 1961 operand_reg[nop] = reg = op;
1962 biggest_mode[nop] = GET_MODE (op);
1963 if (GET_CODE (op) == SUBREG)
c6a6cdaa 1964 {
081c1d32 1965 biggest_mode[nop] = wider_subreg_mode (op);
0244be31 1966 operand_reg[nop] = reg = SUBREG_REG (op);
c6a6cdaa 1967 }
0244be31 1968 if (! REG_P (reg))
c6a6cdaa 1969 operand_reg[nop] = NULL_RTX;
0244be31 1970 else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
1971 || ((int) REGNO (reg)
1972 == lra_get_elimination_hard_regno (REGNO (reg))))
1973 no_subreg_reg_operand[nop] = reg;
1974 else
1975 operand_reg[nop] = no_subreg_reg_operand[nop]
1976 /* Just use natural mode for elimination result. It should
1977 be enough for extra constraints hooks. */
1978 = regno_reg_rtx[hard_regno[nop]];
c6a6cdaa 1979 }
1980
1981 /* The constraints are made of several alternatives. Each operand's
1982 constraint looks like foo,bar,... with commas separating the
1983 alternatives. The first alternatives for all operands go
1984 together, the second alternatives go together, etc.
1985
1986 First loop over alternatives. */
e1a797ad 1987 alternative_mask preferred = curr_id->preferred_alternatives;
d2b854bc 1988 if (only_alternative >= 0)
e1a797ad 1989 preferred &= ALTERNATIVE_BIT (only_alternative);
d2b854bc 1990
c6a6cdaa 1991 for (nalt = 0; nalt < n_alternatives; nalt++)
1992 {
1993 /* Loop over operands for one constraint alternative. */
e1a797ad 1994 if (!TEST_BIT (preferred, nalt))
c6a6cdaa 1995 continue;
1996
3dfcf76a 1997 bool matching_early_clobber[MAX_RECOG_OPERANDS];
8afaf3bf 1998 curr_small_class_check++;
eb70a065 1999 overall = losers = addr_losers = 0;
2000 static_reject = reject = reload_nregs = reload_sum = 0;
c6a6cdaa 2001 for (nop = 0; nop < n_operands; nop++)
34575461 2002 {
2003 int inc = (curr_static_id
2004 ->operand_alternative[nalt * n_operands + nop].reject);
2005 if (lra_dump_file != NULL && inc != 0)
2006 fprintf (lra_dump_file,
2007 " Staticly defined alt reject+=%d\n", inc);
eb70a065 2008 static_reject += inc;
3dfcf76a 2009 matching_early_clobber[nop] = 0;
34575461 2010 }
eb70a065 2011 reject += static_reject;
c6a6cdaa 2012 early_clobbered_regs_num = 0;
2013
2014 for (nop = 0; nop < n_operands; nop++)
2015 {
2016 const char *p;
2017 char *end;
2018 int len, c, m, i, opalt_num, this_alternative_matches;
2019 bool win, did_match, offmemok, early_clobber_p;
2020 /* false => this operand can be reloaded somehow for this
2021 alternative. */
2022 bool badop;
2023 /* true => this operand can be reloaded if the alternative
2024 allows regs. */
2025 bool winreg;
2026 /* True if a constant forced into memory would be OK for
2027 this operand. */
2028 bool constmemok;
2029 enum reg_class this_alternative, this_costly_alternative;
2030 HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2031 bool this_alternative_match_win, this_alternative_win;
2032 bool this_alternative_offmemok;
2b1732ad 2033 bool scratch_p;
3754d046 2034 machine_mode mode;
79bc09fb 2035 enum constraint_num cn;
c6a6cdaa 2036
2037 opalt_num = nalt * n_operands + nop;
2038 if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2039 {
2040 /* Fast track for no constraints at all. */
2041 curr_alt[nop] = NO_REGS;
2042 CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2043 curr_alt_win[nop] = true;
2044 curr_alt_match_win[nop] = false;
2045 curr_alt_offmemok[nop] = false;
2046 curr_alt_matches[nop] = -1;
2047 continue;
2048 }
1a8f8886 2049
c6a6cdaa 2050 op = no_subreg_reg_operand[nop];
2051 mode = curr_operand_mode[nop];
2052
2053 win = did_match = winreg = offmemok = constmemok = false;
2054 badop = true;
1a8f8886 2055
c6a6cdaa 2056 early_clobber_p = false;
2057 p = curr_static_id->operand_alternative[opalt_num].constraint;
1a8f8886 2058
c6a6cdaa 2059 this_costly_alternative = this_alternative = NO_REGS;
2060 /* We update set of possible hard regs besides its class
2061 because reg class might be inaccurate. For example,
2062 union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2063 is translated in HI_REGS because classes are merged by
2064 pairs and there is no accurate intermediate class. */
2065 CLEAR_HARD_REG_SET (this_alternative_set);
2066 CLEAR_HARD_REG_SET (this_costly_alternative_set);
2067 this_alternative_win = false;
2068 this_alternative_match_win = false;
2069 this_alternative_offmemok = false;
2070 this_alternative_matches = -1;
1a8f8886 2071
c6a6cdaa 2072 /* An empty constraint should be excluded by the fast
2073 track. */
2074 lra_assert (*p != 0 && *p != ',');
1a8f8886 2075
ed6272f7 2076 op_reject = 0;
c6a6cdaa 2077 /* Scan this alternative's specs for this operand; set WIN
2078 if the operand fits any letter in this alternative.
2079 Otherwise, clear BADOP if this operand could fit some
2080 letter after reloads, or set WINREG if this operand could
2081 fit after reloads provided the constraint allows some
2082 registers. */
2083 costly_p = false;
2084 do
2085 {
2086 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2087 {
2088 case '\0':
2089 len = 0;
2090 break;
2091 case ',':
2092 c = '\0';
2093 break;
1a8f8886 2094
c6a6cdaa 2095 case '&':
2096 early_clobber_p = true;
2097 break;
1a8f8886 2098
ed6272f7 2099 case '$':
2100 op_reject += LRA_MAX_REJECT;
2101 break;
2102 case '^':
2103 op_reject += LRA_LOSER_COST_FACTOR;
2104 break;
2105
c6a6cdaa 2106 case '#':
2107 /* Ignore rest of this alternative. */
2108 c = '\0';
2109 break;
1a8f8886 2110
c6a6cdaa 2111 case '0': case '1': case '2': case '3': case '4':
2112 case '5': case '6': case '7': case '8': case '9':
2113 {
2114 int m_hregno;
2115 bool match_p;
1a8f8886 2116
c6a6cdaa 2117 m = strtoul (p, &end, 10);
2118 p = end;
2119 len = 0;
2120 lra_assert (nop > m);
1a8f8886 2121
e23bf764 2122 /* Reject matches if we don't know which operand is
2123 bigger. This situation would arguably be a bug in
2124 an .md pattern, but could also occur in a user asm. */
2125 if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2126 GET_MODE_SIZE (biggest_mode[nop])))
2127 break;
2128
529fdbd1 2129 /* Don't match wrong asm insn operands for proper
2130 diagnostic later. */
2131 if (INSN_CODE (curr_insn) < 0
2132 && (curr_operand_mode[m] == BLKmode
2133 || curr_operand_mode[nop] == BLKmode)
2134 && curr_operand_mode[m] != curr_operand_mode[nop])
2135 break;
2136
331a9ecc 2137 m_hregno = get_hard_regno (*curr_id->operand_loc[m], false);
c6a6cdaa 2138 /* We are supposed to match a previous operand.
2139 If we do, we win if that one did. If we do
2140 not, count both of the operands as losers.
2141 (This is too conservative, since most of the
2142 time only a single reload insn will be needed
2143 to make the two operands win. As a result,
2144 this alternative may be rejected when it is
2145 actually desirable.) */
2146 match_p = false;
2147 if (operands_match_p (*curr_id->operand_loc[nop],
2148 *curr_id->operand_loc[m], m_hregno))
2149 {
2150 /* We should reject matching of an early
2151 clobber operand if the matching operand is
2152 not dying in the insn. */
2153 if (! curr_static_id->operand[m].early_clobber
2154 || operand_reg[nop] == NULL_RTX
2155 || (find_regno_note (curr_insn, REG_DEAD,
89c2edcf 2156 REGNO (op))
2157 || REGNO (op) == REGNO (operand_reg[m])))
c6a6cdaa 2158 match_p = true;
2159 }
2160 if (match_p)
2161 {
2162 /* If we are matching a non-offsettable
2163 address where an offsettable address was
2164 expected, then we must reject this
2165 combination, because we can't reload
2166 it. */
2167 if (curr_alt_offmemok[m]
2168 && MEM_P (*curr_id->operand_loc[m])
2169 && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2170 continue;
c6a6cdaa 2171 }
2172 else
2173 {
b782636f 2174 /* Operands don't match. If the operands are
2175 different user defined explicit hard registers,
2176 then we cannot make them match. */
2177 if ((REG_P (*curr_id->operand_loc[nop])
2178 || SUBREG_P (*curr_id->operand_loc[nop]))
2179 && (REG_P (*curr_id->operand_loc[m])
2180 || SUBREG_P (*curr_id->operand_loc[m])))
2181 {
2182 rtx nop_reg = *curr_id->operand_loc[nop];
2183 if (SUBREG_P (nop_reg))
2184 nop_reg = SUBREG_REG (nop_reg);
2185 rtx m_reg = *curr_id->operand_loc[m];
2186 if (SUBREG_P (m_reg))
2187 m_reg = SUBREG_REG (m_reg);
2188
2189 if (REG_P (nop_reg)
2190 && HARD_REGISTER_P (nop_reg)
2191 && REG_USERVAR_P (nop_reg)
2192 && REG_P (m_reg)
2193 && HARD_REGISTER_P (m_reg)
2194 && REG_USERVAR_P (m_reg))
2195 break;
2196 }
2197
2198 /* Both operands must allow a reload register,
2199 otherwise we cannot make them match. */
c6a6cdaa 2200 if (curr_alt[m] == NO_REGS)
2201 break;
2202 /* Retroactively mark the operand we had to
2203 match as a loser, if it wasn't already and
2204 it wasn't matched to a register constraint
2205 (e.g it might be matched by memory). */
2206 if (curr_alt_win[m]
2207 && (operand_reg[m] == NULL_RTX
2208 || hard_regno[m] < 0))
2209 {
2210 losers++;
2211 reload_nregs
2212 += (ira_reg_class_max_nregs[curr_alt[m]]
2213 [GET_MODE (*curr_id->operand_loc[m])]);
2214 }
1a8f8886 2215
53f1eb5d 2216 /* Prefer matching earlyclobber alternative as
2217 it results in less hard regs required for
2218 the insn than a non-matching earlyclobber
2219 alternative. */
2220 if (curr_static_id->operand[m].early_clobber)
2221 {
2222 if (lra_dump_file != NULL)
2223 fprintf
2224 (lra_dump_file,
2225 " %d Matching earlyclobber alt:"
2226 " reject--\n",
2227 nop);
3dfcf76a 2228 if (!matching_early_clobber[m])
2229 {
2230 reject--;
2231 matching_early_clobber[m] = 1;
2232 }
53f1eb5d 2233 }
2234 /* Otherwise we prefer no matching
2235 alternatives because it gives more freedom
2236 in RA. */
2237 else if (operand_reg[nop] == NULL_RTX
2238 || (find_regno_note (curr_insn, REG_DEAD,
2239 REGNO (operand_reg[nop]))
2240 == NULL_RTX))
34575461 2241 {
2242 if (lra_dump_file != NULL)
2243 fprintf
2244 (lra_dump_file,
2245 " %d Matching alt: reject+=2\n",
2246 nop);
2247 reject += 2;
2248 }
c6a6cdaa 2249 }
2250 /* If we have to reload this operand and some
2251 previous operand also had to match the same
2252 thing as this operand, we don't know how to do
2253 that. */
2254 if (!match_p || !curr_alt_win[m])
2255 {
2256 for (i = 0; i < nop; i++)
2257 if (curr_alt_matches[i] == m)
2258 break;
2259 if (i < nop)
2260 break;
2261 }
2262 else
2263 did_match = true;
1a8f8886 2264
7ceb795f 2265 this_alternative_matches = m;
c6a6cdaa 2266 /* This can be fixed with reloads if the operand
2267 we are supposed to match can be fixed with
2268 reloads. */
2269 badop = false;
2270 this_alternative = curr_alt[m];
2271 COPY_HARD_REG_SET (this_alternative_set, curr_alt_set[m]);
4b3aba76 2272 winreg = this_alternative != NO_REGS;
c6a6cdaa 2273 break;
2274 }
1a8f8886 2275
c6a6cdaa 2276 case 'g':
2277 if (MEM_P (op)
2278 || general_constant_p (op)
2279 || spilled_pseudo_p (op))
2280 win = true;
79bc09fb 2281 cl = GENERAL_REGS;
c6a6cdaa 2282 goto reg;
1a8f8886 2283
c6a6cdaa 2284 default:
79bc09fb 2285 cn = lookup_constraint (p);
2286 switch (get_constraint_type (cn))
c6a6cdaa 2287 {
79bc09fb 2288 case CT_REGISTER:
2289 cl = reg_class_for_constraint (cn);
2290 if (cl != NO_REGS)
2291 goto reg;
2292 break;
1a8f8886 2293
4e67d0bf 2294 case CT_CONST_INT:
2295 if (CONST_INT_P (op)
2296 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2297 win = true;
2298 break;
2299
79bc09fb 2300 case CT_MEMORY:
2301 if (MEM_P (op)
2302 && satisfies_memory_constraint_p (op, cn))
2303 win = true;
2304 else if (spilled_pseudo_p (op))
2305 win = true;
2306
2307 /* If we didn't already win, we can reload constants
2308 via force_const_mem or put the pseudo value into
2309 memory, or make other memory by reloading the
2310 address like for 'o'. */
2311 if (CONST_POOL_OK_P (mode, op)
003000a4 2312 || MEM_P (op) || REG_P (op)
2313 /* We can restore the equiv insn by a
2314 reload. */
2315 || equiv_substition_p[nop])
79bc09fb 2316 badop = false;
2317 constmemok = true;
2318 offmemok = true;
2319 break;
2320
2321 case CT_ADDRESS:
afca8a73 2322 /* An asm operand with an address constraint
2323 that doesn't satisfy address_operand has
2324 is_address cleared, so that we don't try to
2325 make a non-address fit. */
2326 if (!curr_static_id->operand[nop].is_address)
2327 break;
79bc09fb 2328 /* If we didn't already win, we can reload the address
2329 into a base register. */
2330 if (satisfies_address_constraint_p (op, cn))
2331 win = true;
2332 cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2333 ADDRESS, SCRATCH);
2334 badop = false;
2335 goto reg;
2336
2337 case CT_FIXED_FORM:
2338 if (constraint_satisfied_p (op, cn))
c6a6cdaa 2339 win = true;
c6a6cdaa 2340 break;
6b3b345a 2341
2342 case CT_SPECIAL_MEMORY:
2343 if (MEM_P (op)
2344 && satisfies_memory_constraint_p (op, cn))
2345 win = true;
2346 else if (spilled_pseudo_p (op))
2347 win = true;
2348 break;
c6a6cdaa 2349 }
79bc09fb 2350 break;
1a8f8886 2351
79bc09fb 2352 reg:
fcf9e045 2353 if (mode == BLKmode)
2354 break;
c6a6cdaa 2355 this_alternative = reg_class_subunion[this_alternative][cl];
2356 IOR_HARD_REG_SET (this_alternative_set,
2357 reg_class_contents[cl]);
2358 if (costly_p)
2359 {
2360 this_costly_alternative
2361 = reg_class_subunion[this_costly_alternative][cl];
2362 IOR_HARD_REG_SET (this_costly_alternative_set,
2363 reg_class_contents[cl]);
2364 }
c6a6cdaa 2365 winreg = true;
2366 if (REG_P (op))
2367 {
2368 if (hard_regno[nop] >= 0
2369 && in_hard_reg_set_p (this_alternative_set,
2370 mode, hard_regno[nop]))
2371 win = true;
2372 else if (hard_regno[nop] < 0
2373 && in_class_p (op, this_alternative, NULL))
2374 win = true;
2375 }
2376 break;
2377 }
2378 if (c != ' ' && c != '\t')
2379 costly_p = c == '*';
2380 }
2381 while ((p += len), c);
1a8f8886 2382
2b1732ad 2383 scratch_p = (operand_reg[nop] != NULL_RTX
2384 && lra_former_scratch_p (REGNO (operand_reg[nop])));
c6a6cdaa 2385 /* Record which operands fit this alternative. */
2386 if (win)
2387 {
2388 this_alternative_win = true;
2389 if (operand_reg[nop] != NULL_RTX)
2390 {
2391 if (hard_regno[nop] >= 0)
2392 {
2393 if (in_hard_reg_set_p (this_costly_alternative_set,
2394 mode, hard_regno[nop]))
34575461 2395 {
2396 if (lra_dump_file != NULL)
2397 fprintf (lra_dump_file,
2398 " %d Costly set: reject++\n",
2399 nop);
2400 reject++;
2401 }
c6a6cdaa 2402 }
2403 else
2404 {
2b1732ad 2405 /* Prefer won reg to spilled pseudo under other
2406 equal conditions for possible inheritance. */
2407 if (! scratch_p)
2408 {
2409 if (lra_dump_file != NULL)
2410 fprintf
2411 (lra_dump_file,
2412 " %d Non pseudo reload: reject++\n",
2413 nop);
2414 reject++;
2415 }
c6a6cdaa 2416 if (in_class_p (operand_reg[nop],
2417 this_costly_alternative, NULL))
34575461 2418 {
2419 if (lra_dump_file != NULL)
2420 fprintf
2421 (lra_dump_file,
2422 " %d Non pseudo costly reload:"
2423 " reject++\n",
2424 nop);
2425 reject++;
2426 }
c6a6cdaa 2427 }
67cf9b55 2428 /* We simulate the behavior of old reload here.
c6a6cdaa 2429 Although scratches need hard registers and it
2430 might result in spilling other pseudos, no reload
2431 insns are generated for the scratches. So it
2432 might cost something but probably less than old
2433 reload pass believes. */
2b1732ad 2434 if (scratch_p)
34575461 2435 {
2436 if (lra_dump_file != NULL)
2437 fprintf (lra_dump_file,
2b1732ad 2438 " %d Scratch win: reject+=2\n",
34575461 2439 nop);
2b1732ad 2440 reject += 2;
34575461 2441 }
c6a6cdaa 2442 }
2443 }
2444 else if (did_match)
2445 this_alternative_match_win = true;
2446 else
2447 {
2448 int const_to_mem = 0;
2449 bool no_regs_p;
2450
ed6272f7 2451 reject += op_reject;
3b3a5e5f 2452 /* Never do output reload of stack pointer. It makes
2453 impossible to do elimination when SP is changed in
2454 RTL. */
2455 if (op == stack_pointer_rtx && ! frame_pointer_needed
2456 && curr_static_id->operand[nop].type != OP_IN)
2457 goto fail;
2458
aa46b107 2459 /* If this alternative asks for a specific reg class, see if there
2460 is at least one allocatable register in that class. */
c6a6cdaa 2461 no_regs_p
2462 = (this_alternative == NO_REGS
2463 || (hard_reg_set_subset_p
2464 (reg_class_contents[this_alternative],
2465 lra_no_alloc_regs)));
aa46b107 2466
2467 /* For asms, verify that the class for this alternative is possible
2468 for the mode that is specified. */
1524bcdc 2469 if (!no_regs_p && INSN_CODE (curr_insn) < 0)
aa46b107 2470 {
2471 int i;
2472 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
b395382f 2473 if (targetm.hard_regno_mode_ok (i, mode)
b3d446cb 2474 && in_hard_reg_set_p (reg_class_contents[this_alternative],
2475 mode, i))
aa46b107 2476 break;
2477 if (i == FIRST_PSEUDO_REGISTER)
2478 winreg = false;
2479 }
2480
c6a6cdaa 2481 /* If this operand accepts a register, and if the
2482 register class has at least one allocatable register,
2483 then this operand can be reloaded. */
2484 if (winreg && !no_regs_p)
2485 badop = false;
1a8f8886 2486
c6a6cdaa 2487 if (badop)
b3d446cb 2488 {
2489 if (lra_dump_file != NULL)
2490 fprintf (lra_dump_file,
2491 " alt=%d: Bad operand -- refuse\n",
2492 nalt);
2493 goto fail;
2494 }
c6a6cdaa 2495
a2ebcb84 2496 if (this_alternative != NO_REGS)
2497 {
2498 HARD_REG_SET available_regs;
2499
2500 COPY_HARD_REG_SET (available_regs,
2501 reg_class_contents[this_alternative]);
2502 AND_COMPL_HARD_REG_SET
2503 (available_regs,
2504 ira_prohibited_class_mode_regs[this_alternative][mode]);
2505 AND_COMPL_HARD_REG_SET (available_regs, lra_no_alloc_regs);
2506 if (hard_reg_set_empty_p (available_regs))
2507 {
2508 /* There are no hard regs holding a value of given
2509 mode. */
2510 if (offmemok)
2511 {
2512 this_alternative = NO_REGS;
2513 if (lra_dump_file != NULL)
2514 fprintf (lra_dump_file,
2515 " %d Using memory because of"
2516 " a bad mode: reject+=2\n",
2517 nop);
2518 reject += 2;
2519 }
2520 else
2521 {
2522 if (lra_dump_file != NULL)
2523 fprintf (lra_dump_file,
2524 " alt=%d: Wrong mode -- refuse\n",
2525 nalt);
2526 goto fail;
2527 }
2528 }
2529 }
2530
77a00b11 2531 /* If not assigned pseudo has a class which is a subset of
2532 required reg class, it is a less costly alternative
2533 as the pseudo still can get a hard reg of necessary
2534 class. */
2535 if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2536 && (cl = get_reg_class (REGNO (op))) != NO_REGS
2537 && ira_class_subset_p[this_alternative][cl])
2538 {
2539 if (lra_dump_file != NULL)
2540 fprintf
2541 (lra_dump_file,
2542 " %d Super set class reg: reject-=3\n", nop);
2543 reject -= 3;
2544 }
2545
c6a6cdaa 2546 this_alternative_offmemok = offmemok;
2547 if (this_costly_alternative != NO_REGS)
34575461 2548 {
2549 if (lra_dump_file != NULL)
2550 fprintf (lra_dump_file,
2551 " %d Costly loser: reject++\n", nop);
2552 reject++;
2553 }
c6a6cdaa 2554 /* If the operand is dying, has a matching constraint,
2555 and satisfies constraints of the matched operand
53f1eb5d 2556 which failed to satisfy the own constraints, most probably
4f428208 2557 the reload for this operand will be gone. */
2558 if (this_alternative_matches >= 0
2559 && !curr_alt_win[this_alternative_matches]
2560 && REG_P (op)
2561 && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2562 && (hard_regno[nop] >= 0
2563 ? in_hard_reg_set_p (this_alternative_set,
2564 mode, hard_regno[nop])
2565 : in_class_p (op, this_alternative, NULL)))
2566 {
2567 if (lra_dump_file != NULL)
2568 fprintf
2569 (lra_dump_file,
2570 " %d Dying matched operand reload: reject++\n",
2571 nop);
2572 reject++;
2573 }
2574 else
2e620dc7 2575 {
92dfb77d 2576 /* Strict_low_part requires to reload the register
2577 not the sub-register. In this case we should
2578 check that a final reload hard reg can hold the
2579 value mode. */
2e620dc7 2580 if (curr_static_id->operand[nop].strict_low
2581 && REG_P (op)
2582 && hard_regno[nop] < 0
2583 && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2584 && ira_class_hard_regs_num[this_alternative] > 0
b395382f 2585 && (!targetm.hard_regno_mode_ok
2586 (ira_class_hard_regs[this_alternative][0],
2587 GET_MODE (*curr_id->operand_loc[nop]))))
b3d446cb 2588 {
2589 if (lra_dump_file != NULL)
2590 fprintf
2591 (lra_dump_file,
2592 " alt=%d: Strict low subreg reload -- refuse\n",
2593 nalt);
2594 goto fail;
2595 }
2e620dc7 2596 losers++;
2597 }
c6a6cdaa 2598 if (operand_reg[nop] != NULL_RTX
2599 /* Output operands and matched input operands are
2600 not inherited. The following conditions do not
2601 exactly describe the previous statement but they
2602 are pretty close. */
2603 && curr_static_id->operand[nop].type != OP_OUT
2604 && (this_alternative_matches < 0
2605 || curr_static_id->operand[nop].type != OP_IN))
2606 {
2607 int last_reload = (lra_reg_info[ORIGINAL_REGNO
2608 (operand_reg[nop])]
2609 .last_reload);
2610
92b64c52 2611 /* The value of reload_sum has sense only if we
2612 process insns in their order. It happens only on
2613 the first constraints sub-pass when we do most of
2614 reload work. */
2615 if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
c6a6cdaa 2616 reload_sum += last_reload - bb_reload_num;
2617 }
2618 /* If this is a constant that is reloaded into the
2619 desired class by copying it to memory first, count
2620 that as another reload. This is consistent with
2621 other code and is required to avoid choosing another
2622 alternative when the constant is moved into memory.
2623 Note that the test here is precisely the same as in
2624 the code below that calls force_const_mem. */
2625 if (CONST_POOL_OK_P (mode, op)
2626 && ((targetm.preferred_reload_class
2627 (op, this_alternative) == NO_REGS)
2628 || no_input_reloads_p))
2629 {
2630 const_to_mem = 1;
2631 if (! no_regs_p)
2632 losers++;
2633 }
1a8f8886 2634
c6a6cdaa 2635 /* Alternative loses if it requires a type of reload not
2636 permitted for this insn. We can always reload
2637 objects with a REG_UNUSED note. */
2638 if ((curr_static_id->operand[nop].type != OP_IN
2639 && no_output_reloads_p
2640 && ! find_reg_note (curr_insn, REG_UNUSED, op))
2641 || (curr_static_id->operand[nop].type != OP_OUT
b3d446cb 2642 && no_input_reloads_p && ! const_to_mem)
2643 || (this_alternative_matches >= 0
fe5cb3e1 2644 && (no_input_reloads_p
2645 || (no_output_reloads_p
2646 && (curr_static_id->operand
2647 [this_alternative_matches].type != OP_IN)
2648 && ! find_reg_note (curr_insn, REG_UNUSED,
2649 no_subreg_reg_operand
2650 [this_alternative_matches])))))
b3d446cb 2651 {
2652 if (lra_dump_file != NULL)
2653 fprintf
2654 (lra_dump_file,
2655 " alt=%d: No input/otput reload -- refuse\n",
2656 nalt);
2657 goto fail;
2658 }
1a8f8886 2659
f4d3c071 2660 /* Alternative loses if it required class pseudo cannot
2e19c420 2661 hold value of required mode. Such insns can be
2bd08537 2662 described by insn definitions with mode iterators. */
2e19c420 2663 if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
2664 && ! hard_reg_set_empty_p (this_alternative_set)
2bd08537 2665 /* It is common practice for constraints to use a
2666 class which does not have actually enough regs to
2667 hold the value (e.g. x86 AREG for mode requiring
2668 more one general reg). Therefore we have 2
07c11f2b 2669 conditions to check that the reload pseudo cannot
2670 hold the mode value. */
b395382f 2671 && (!targetm.hard_regno_mode_ok
2672 (ira_class_hard_regs[this_alternative][0],
2673 GET_MODE (*curr_id->operand_loc[nop])))
2bd08537 2674 /* The above condition is not enough as the first
2675 reg in ira_class_hard_regs can be not aligned for
2676 multi-words mode values. */
25cd984c 2677 && (prohibited_class_reg_set_mode_p
2678 (this_alternative, this_alternative_set,
2679 GET_MODE (*curr_id->operand_loc[nop]))))
2680 {
2681 if (lra_dump_file != NULL)
2682 fprintf (lra_dump_file,
2683 " alt=%d: reload pseudo for op %d "
000969f9 2684 "cannot hold the mode value -- refuse\n",
25cd984c 2685 nalt, nop);
2686 goto fail;
2e19c420 2687 }
2688
4b3aba76 2689 /* Check strong discouragement of reload of non-constant
2690 into class THIS_ALTERNATIVE. */
2691 if (! CONSTANT_P (op) && ! no_regs_p
2692 && (targetm.preferred_reload_class
2693 (op, this_alternative) == NO_REGS
2694 || (curr_static_id->operand[nop].type == OP_OUT
2695 && (targetm.preferred_output_reload_class
2696 (op, this_alternative) == NO_REGS))))
34575461 2697 {
2698 if (lra_dump_file != NULL)
2699 fprintf (lra_dump_file,
2700 " %d Non-prefered reload: reject+=%d\n",
2701 nop, LRA_MAX_REJECT);
2702 reject += LRA_MAX_REJECT;
2703 }
1a8f8886 2704
0178c26e 2705 if (! (MEM_P (op) && offmemok)
2706 && ! (const_to_mem && constmemok))
c6a6cdaa 2707 {
2708 /* We prefer to reload pseudos over reloading other
2709 things, since such reloads may be able to be
2710 eliminated later. So bump REJECT in other cases.
2711 Don't do this in the case where we are forcing a
2712 constant into memory and it will then win since
2713 we don't want to have a different alternative
2714 match then. */
2715 if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
34575461 2716 {
2717 if (lra_dump_file != NULL)
2718 fprintf
2719 (lra_dump_file,
2720 " %d Non-pseudo reload: reject+=2\n",
2721 nop);
2722 reject += 2;
2723 }
1a8f8886 2724
c6a6cdaa 2725 if (! no_regs_p)
2726 reload_nregs
2727 += ira_reg_class_max_nregs[this_alternative][mode];
273c330a 2728
2729 if (SMALL_REGISTER_CLASS_P (this_alternative))
34575461 2730 {
2731 if (lra_dump_file != NULL)
2732 fprintf
2733 (lra_dump_file,
2734 " %d Small class reload: reject+=%d\n",
2735 nop, LRA_LOSER_COST_FACTOR / 2);
2736 reject += LRA_LOSER_COST_FACTOR / 2;
2737 }
c6a6cdaa 2738 }
2739
70892847 2740 /* We are trying to spill pseudo into memory. It is
2741 usually more costly than moving to a hard register
2742 although it might takes the same number of
b02d1ebc 2743 reloads.
2744
2745 Non-pseudo spill may happen also. Suppose a target allows both
2746 register and memory in the operand constraint alternatives,
2747 then it's typical that an eliminable register has a substition
2748 of "base + offset" which can either be reloaded by a simple
2749 "new_reg <= base + offset" which will match the register
2750 constraint, or a similar reg addition followed by further spill
2751 to and reload from memory which will match the memory
2752 constraint, but this memory spill will be much more costly
2753 usually.
2754
2755 Code below increases the reject for both pseudo and non-pseudo
2756 spill. */
21b32b46 2757 if (no_regs_p
2758 && !(MEM_P (op) && offmemok)
2759 && !(REG_P (op) && hard_regno[nop] < 0))
34575461 2760 {
2761 if (lra_dump_file != NULL)
2762 fprintf
2763 (lra_dump_file,
b02d1ebc 2764 " %d Spill %spseudo into memory: reject+=3\n",
2765 nop, REG_P (op) ? "" : "Non-");
34575461 2766 reject += 3;
c2d1c3eb 2767 if (VECTOR_MODE_P (mode))
2768 {
2769 /* Spilling vectors into memory is usually more
2770 costly as they contain big values. */
2771 if (lra_dump_file != NULL)
2772 fprintf
2773 (lra_dump_file,
2774 " %d Spill vector pseudo: reject+=2\n",
2775 nop);
2776 reject += 2;
2777 }
34575461 2778 }
70892847 2779
9cd589b8 2780 /* When we use an operand requiring memory in given
2781 alternative, the insn should write *and* read the
2782 value to/from memory it is costly in comparison with
2783 an insn alternative which does not use memory
2784 (e.g. register or immediate operand). We exclude
2785 memory operand for such case as we can satisfy the
2786 memory constraints by reloading address. */
2787 if (no_regs_p && offmemok && !MEM_P (op))
8afaf3bf 2788 {
2789 if (lra_dump_file != NULL)
2790 fprintf
2791 (lra_dump_file,
2792 " Using memory insn operand %d: reject+=3\n",
2793 nop);
2794 reject += 3;
2795 }
2796
2590979b 2797 /* If reload requires moving value through secondary
2798 memory, it will need one more insn at least. */
2799 if (this_alternative != NO_REGS
2800 && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2801 && ((curr_static_id->operand[nop].type != OP_OUT
c836e75b 2802 && targetm.secondary_memory_needed (GET_MODE (op), cl,
2803 this_alternative))
2590979b 2804 || (curr_static_id->operand[nop].type != OP_IN
c836e75b 2805 && (targetm.secondary_memory_needed
2806 (GET_MODE (op), this_alternative, cl)))))
2590979b 2807 losers++;
c836e75b 2808
eb70a065 2809 if (MEM_P (op) && offmemok)
2810 addr_losers++;
67f1426f 2811 else
44cafa9a 2812 {
67f1426f 2813 /* Input reloads can be inherited more often than
2814 output reloads can be removed, so penalize output
2815 reloads. */
2816 if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2817 {
2818 if (lra_dump_file != NULL)
2819 fprintf
2820 (lra_dump_file,
2821 " %d Non input pseudo reload: reject++\n",
2822 nop);
2823 reject++;
2824 }
2825
2826 if (curr_static_id->operand[nop].type == OP_INOUT)
2827 {
2828 if (lra_dump_file != NULL)
2829 fprintf
2830 (lra_dump_file,
2831 " %d Input/Output reload: reject+=%d\n",
2832 nop, LRA_LOSER_COST_FACTOR);
2833 reject += LRA_LOSER_COST_FACTOR;
2834 }
44cafa9a 2835 }
c6a6cdaa 2836 }
1a8f8886 2837
2b1732ad 2838 if (early_clobber_p && ! scratch_p)
34575461 2839 {
2840 if (lra_dump_file != NULL)
2841 fprintf (lra_dump_file,
2842 " %d Early clobber: reject++\n", nop);
2843 reject++;
2844 }
c6a6cdaa 2845 /* ??? We check early clobbers after processing all operands
2846 (see loop below) and there we update the costs more.
2847 Should we update the cost (may be approximately) here
2848 because of early clobber register reloads or it is a rare
2849 or non-important thing to be worth to do it. */
eb70a065 2850 overall = (losers * LRA_LOSER_COST_FACTOR + reject
2851 - (addr_losers == losers ? static_reject : 0));
c6a6cdaa 2852 if ((best_losers == 0 || losers != 0) && best_overall < overall)
f7c98bb1 2853 {
2854 if (lra_dump_file != NULL)
2855 fprintf (lra_dump_file,
34575461 2856 " alt=%d,overall=%d,losers=%d -- refuse\n",
f7c98bb1 2857 nalt, overall, losers);
2858 goto fail;
2859 }
c6a6cdaa 2860
8afaf3bf 2861 if (update_and_check_small_class_inputs (nop, this_alternative))
2862 {
2863 if (lra_dump_file != NULL)
2864 fprintf (lra_dump_file,
2865 " alt=%d, not enough small class regs -- refuse\n",
2866 nalt);
2867 goto fail;
2868 }
c6a6cdaa 2869 curr_alt[nop] = this_alternative;
2870 COPY_HARD_REG_SET (curr_alt_set[nop], this_alternative_set);
2871 curr_alt_win[nop] = this_alternative_win;
2872 curr_alt_match_win[nop] = this_alternative_match_win;
2873 curr_alt_offmemok[nop] = this_alternative_offmemok;
2874 curr_alt_matches[nop] = this_alternative_matches;
1a8f8886 2875
c6a6cdaa 2876 if (this_alternative_matches >= 0
2877 && !did_match && !this_alternative_win)
2878 curr_alt_win[this_alternative_matches] = false;
1a8f8886 2879
c6a6cdaa 2880 if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2881 early_clobbered_nops[early_clobbered_regs_num++] = nop;
2882 }
eb70a065 2883
ea99c7a1 2884 if (curr_insn_set != NULL_RTX && n_operands == 2
2885 /* Prevent processing non-move insns. */
2886 && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2887 || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2888 && ((! curr_alt_win[0] && ! curr_alt_win[1]
2889 && REG_P (no_subreg_reg_operand[0])
2890 && REG_P (no_subreg_reg_operand[1])
2891 && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2892 || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2893 || (! curr_alt_win[0] && curr_alt_win[1]
2894 && REG_P (no_subreg_reg_operand[1])
eb70a065 2895 /* Check that we reload memory not the memory
2896 address. */
9782b2bc 2897 && ! (curr_alt_offmemok[0]
2898 && MEM_P (no_subreg_reg_operand[0]))
ea99c7a1 2899 && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2900 || (curr_alt_win[0] && ! curr_alt_win[1]
2901 && REG_P (no_subreg_reg_operand[0])
eb70a065 2902 /* Check that we reload memory not the memory
2903 address. */
9782b2bc 2904 && ! (curr_alt_offmemok[1]
2905 && MEM_P (no_subreg_reg_operand[1]))
ea99c7a1 2906 && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2907 && (! CONST_POOL_OK_P (curr_operand_mode[1],
2908 no_subreg_reg_operand[1])
2909 || (targetm.preferred_reload_class
2910 (no_subreg_reg_operand[1],
2911 (enum reg_class) curr_alt[1]) != NO_REGS))
2912 /* If it is a result of recent elimination in move
2913 insn we can transform it into an add still by
2914 using this alternative. */
69f0f127 2915 && GET_CODE (no_subreg_reg_operand[1]) != PLUS
2916 /* Likewise if the source has been replaced with an
2917 equivalent value. This only happens once -- the reload
2918 will use the equivalent value instead of the register it
2919 replaces -- so there should be no danger of cycling. */
2920 && !equiv_substition_p[1])))
34575461 2921 {
2922 /* We have a move insn and a new reload insn will be similar
9782b2bc 2923 to the current insn. We should avoid such situation as
2924 it results in LRA cycling. */
2925 if (lra_dump_file != NULL)
2926 fprintf (lra_dump_file,
2927 " Cycle danger: overall += LRA_MAX_REJECT\n");
34575461 2928 overall += LRA_MAX_REJECT;
2929 }
c6a6cdaa 2930 ok_p = true;
2931 curr_alt_dont_inherit_ops_num = 0;
2932 for (nop = 0; nop < early_clobbered_regs_num; nop++)
2933 {
8c3a9b39 2934 int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
c6a6cdaa 2935 HARD_REG_SET temp_set;
2936
2937 i = early_clobbered_nops[nop];
2938 if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
2939 || hard_regno[i] < 0)
2940 continue;
89c2edcf 2941 lra_assert (operand_reg[i] != NULL_RTX);
c6a6cdaa 2942 clobbered_hard_regno = hard_regno[i];
2943 CLEAR_HARD_REG_SET (temp_set);
2944 add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
8c3a9b39 2945 first_conflict_j = last_conflict_j = -1;
c6a6cdaa 2946 for (j = 0; j < n_operands; j++)
2947 if (j == i
2948 /* We don't want process insides of match_operator and
2949 match_parallel because otherwise we would process
2950 their operands once again generating a wrong
2951 code. */
2952 || curr_static_id->operand[j].is_operator)
2953 continue;
2954 else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
2955 || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
2956 continue;
89c2edcf 2957 /* If we don't reload j-th operand, check conflicts. */
2958 else if ((curr_alt_win[j] || curr_alt_match_win[j])
2959 && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
8c3a9b39 2960 {
2961 if (first_conflict_j < 0)
2962 first_conflict_j = j;
2963 last_conflict_j = j;
b782636f 2964 /* Both the earlyclobber operand and conflicting operand
2965 cannot both be user defined hard registers. */
2966 if (HARD_REGISTER_P (operand_reg[i])
2967 && REG_USERVAR_P (operand_reg[i])
2968 && operand_reg[j] != NULL_RTX
2969 && HARD_REGISTER_P (operand_reg[j])
2970 && REG_USERVAR_P (operand_reg[j]))
2971 fatal_insn ("unable to generate reloads for "
2972 "impossible constraints:", curr_insn);
8c3a9b39 2973 }
2974 if (last_conflict_j < 0)
c6a6cdaa 2975 continue;
b782636f 2976
2977 /* If an earlyclobber operand conflicts with another non-matching
2978 operand (ie, they have been assigned the same hard register),
2979 then it is better to reload the other operand, as there may
2980 exist yet another operand with a matching constraint associated
2981 with the earlyclobber operand. However, if one of the operands
2982 is an explicit use of a hard register, then we must reload the
2983 other non-hard register operand. */
2984 if (HARD_REGISTER_P (operand_reg[i])
2985 || (first_conflict_j == last_conflict_j
2986 && operand_reg[last_conflict_j] != NULL_RTX
2987 && !curr_alt_match_win[last_conflict_j]
2988 && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
89c2edcf 2989 {
8c3a9b39 2990 curr_alt_win[last_conflict_j] = false;
2991 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
2992 = last_conflict_j;
89c2edcf 2993 losers++;
34575461 2994 if (lra_dump_file != NULL)
2995 fprintf
2996 (lra_dump_file,
2997 " %d Conflict early clobber reload: reject--\n",
2998 i);
89c2edcf 2999 }
c6a6cdaa 3000 else
3001 {
89c2edcf 3002 /* We need to reload early clobbered register and the
3003 matched registers. */
3004 for (j = 0; j < n_operands; j++)
3005 if (curr_alt_matches[j] == i)
3006 {
3007 curr_alt_match_win[j] = false;
3008 losers++;
3009 overall += LRA_LOSER_COST_FACTOR;
3010 }
3011 if (! curr_alt_match_win[i])
3012 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3013 else
3014 {
3015 /* Remember pseudos used for match reloads are never
3016 inherited. */
3017 lra_assert (curr_alt_matches[i] >= 0);
3018 curr_alt_win[curr_alt_matches[i]] = false;
3019 }
3020 curr_alt_win[i] = curr_alt_match_win[i] = false;
3021 losers++;
34575461 3022 if (lra_dump_file != NULL)
3023 fprintf
3024 (lra_dump_file,
19efce70 3025 " %d Matched conflict early clobber reloads: "
34575461 3026 "reject--\n",
3027 i);
3dfcf76a 3028 }
3029 /* Early clobber was already reflected in REJECT. */
3030 if (!matching_early_clobber[i])
3031 {
3032 lra_assert (reject > 0);
f7c98bb1 3033 reject--;
3dfcf76a 3034 matching_early_clobber[i] = 1;
c6a6cdaa 3035 }
3dfcf76a 3036 overall += LRA_LOSER_COST_FACTOR - 1;
c6a6cdaa 3037 }
f7c98bb1 3038 if (lra_dump_file != NULL)
273c330a 3039 fprintf (lra_dump_file, " alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
3040 nalt, overall, losers, reload_nregs);
f7c98bb1 3041
c6a6cdaa 3042 /* If this alternative can be made to work by reloading, and it
3043 needs less reloading than the others checked so far, record
3044 it as the chosen goal for reloading. */
3045 if ((best_losers != 0 && losers == 0)
3046 || (((best_losers == 0 && losers == 0)
3047 || (best_losers != 0 && losers != 0))
3048 && (best_overall > overall
3049 || (best_overall == overall
3050 /* If the cost of the reloads is the same,
3051 prefer alternative which requires minimal
273c330a 3052 number of reload regs. */
3053 && (reload_nregs < best_reload_nregs
3054 || (reload_nregs == best_reload_nregs
04dda2a2 3055 && (best_reload_sum < reload_sum
3056 || (best_reload_sum == reload_sum
3057 && nalt < goal_alt_number))))))))
c6a6cdaa 3058 {
3059 for (nop = 0; nop < n_operands; nop++)
3060 {
3061 goal_alt_win[nop] = curr_alt_win[nop];
3062 goal_alt_match_win[nop] = curr_alt_match_win[nop];
3063 goal_alt_matches[nop] = curr_alt_matches[nop];
3064 goal_alt[nop] = curr_alt[nop];
3065 goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3066 }
3067 goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3068 for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3069 goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3070 goal_alt_swapped = curr_swapped;
3071 best_overall = overall;
3072 best_losers = losers;
c6a6cdaa 3073 best_reload_nregs = reload_nregs;
3074 best_reload_sum = reload_sum;
3075 goal_alt_number = nalt;
3076 }
3077 if (losers == 0)
3078 /* Everything is satisfied. Do not process alternatives
1a8f8886 3079 anymore. */
c6a6cdaa 3080 break;
3081 fail:
3082 ;
3083 }
3084 return ok_p;
3085}
3086
d03288b6 3087/* Make reload base reg from address AD. */
3088static rtx
3089base_to_reg (struct address_info *ad)
3090{
3091 enum reg_class cl;
3092 int code = -1;
3093 rtx new_inner = NULL_RTX;
3094 rtx new_reg = NULL_RTX;
57c26b3a 3095 rtx_insn *insn;
3096 rtx_insn *last_insn = get_last_insn();
d03288b6 3097
0508f466 3098 lra_assert (ad->disp == ad->disp_term);
d03288b6 3099 cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3100 get_index_code (ad));
0508f466 3101 new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX,
d03288b6 3102 cl, "base");
3103 new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
3104 ad->disp_term == NULL
0508f466 3105 ? const0_rtx
d03288b6 3106 : *ad->disp_term);
3107 if (!valid_address_p (ad->mode, new_inner, ad->as))
3108 return NULL_RTX;
0508f466 3109 insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
d03288b6 3110 code = recog_memoized (insn);
3111 if (code < 0)
3112 {
3113 delete_insns_since (last_insn);
3114 return NULL_RTX;
3115 }
3116
3117 return new_inner;
3118}
3119
6cc181b3 3120/* Make reload base reg + DISP from address AD. Return the new pseudo. */
c6a6cdaa 3121static rtx
6cc181b3 3122base_plus_disp_to_reg (struct address_info *ad, rtx disp)
c6a6cdaa 3123{
3124 enum reg_class cl;
3125 rtx new_reg;
3126
6cc181b3 3127 lra_assert (ad->base == ad->base_term);
1efe9e9d 3128 cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
3129 get_index_code (ad));
3130 new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
3131 cl, "base + disp");
6cc181b3 3132 lra_emit_add (new_reg, *ad->base_term, disp);
c6a6cdaa 3133 return new_reg;
3134}
3135
28f7a2af 3136/* Make reload of index part of address AD. Return the new
3137 pseudo. */
3138static rtx
3139index_part_to_reg (struct address_info *ad)
3140{
3141 rtx new_reg;
3142
3143 new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
3144 INDEX_REG_CLASS, "index term");
3145 expand_mult (GET_MODE (*ad->index), *ad->index_term,
3146 GEN_INT (get_index_scale (ad)), new_reg, 1);
3147 return new_reg;
3148}
3149
1efe9e9d 3150/* Return true if we can add a displacement to address AD, even if that
3151 makes the address invalid. The fix-up code requires any new address
3152 to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
809320f0 3153static bool
1efe9e9d 3154can_add_disp_p (struct address_info *ad)
809320f0 3155{
1efe9e9d 3156 return (!ad->autoinc_p
3157 && ad->segment == NULL
3158 && ad->base == ad->base_term
3159 && ad->disp == ad->disp_term);
809320f0 3160}
3161
/* Make equiv substitution in address AD: replace the base and/or index
   register with its equivalent value (after applying any pending
   eliminations -- see get_equiv_with_elimination).  Return true if a
   substitution was made.  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  /* Look through a possible subreg wrapper so we substitute the inner
     register.  A NULL result means there is no (substitutable) base.  */
  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute -- both parts map to themselves.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Simple reg-for-reg replacement.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* Equivalent is "reg + const": take the reg as the new base
	     and fold the constant into the accumulated displacement.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in pre/post-modify) in
	 sync.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       /* A zero scale means the scale is unknown/unsupported
		  here, so the folded displacement would be wrong.  */
	       && (scale = get_index_scale (ad)))
	{
	  /* The index is multiplied by SCALE, so the folded constant
	     must be scaled too.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      /* Apply the accumulated displacement, either to the existing
	 displacement term or by wrapping the whole inner address.  */
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3261
/* Major function to make reloads for an address in operand NOP or
   check its correctness (if CHECK_ONLY_P is true).  The supported
   cases are:

   1) an address that existed before LRA started, at which point it
   must have been valid.  These addresses are subject to elimination
   and may have become invalid due to the elimination offset being out
   of range.

   2) an address created by forcing a constant to memory
   (force_const_to_mem).  The initial form of these addresses might
   not be valid, and it is this function's job to make them valid.

   3) a frame address formed from a register and a (possibly zero)
   constant offset.  As above, these addresses might not be valid and
   this function must make them so.

   Add reloads to the lists *BEFORE and *AFTER.  We might need to add
   reloads to *AFTER because of inc/dec, {pre, post} modify in the
   address.  Return true for any RTL change.

   The function is a helper function which does not produce all
   transformations (when CHECK_ONLY_P is false) which can be
   necessary.  It does just basic steps.  To do all necessary
   transformations use function process_address.  */
static bool
process_address_1 (int nop, bool check_only_p,
		   rtx_insn **before, rtx_insn **after)
{
  struct address_info ad;
  rtx new_reg;
  HOST_WIDE_INT scale;
  rtx op = *curr_id->operand_loc[nop];
  const char *constraint = curr_static_id->operand[nop].constraint;
  enum constraint_num cn = lookup_constraint (constraint);
  bool change_p = false;

  /* A BLKmode mem with a SCRATCH address has no real address to
     process (e.g. asm clobber-style memory operands).  */
  if (MEM_P (op)
      && GET_MODE (op) == BLKmode
      && GET_CODE (XEXP (op, 0)) == SCRATCH)
    return false;

  /* Decompose the operand into an address_info, choosing the
     decomposition that matches how the operand uses the address.  */
  if (insn_extra_address_constraint (cn)
      /* When we find an asm operand with an address constraint that
	 doesn't satisfy address_operand to begin with, we clear
	 is_address, so that we don't try to make a non-address fit.
	 If the asm statement got this far, it's because other
	 constraints are available, and we'll use them, disregarding
	 the unsatisfiable address ones.  */
      && curr_static_id->operand[nop].is_address)
    decompose_lea_address (&ad, curr_id->operand_loc[nop]);
  /* Do not attempt to decompose arbitrary addresses generated by combine
     for asm operands with loose constraints, e.g 'X'.  */
  else if (MEM_P (op)
	   && !(INSN_CODE (curr_insn) < 0
		&& get_constraint_type (cn) == CT_FIXED_FORM
	        && constraint_satisfied_p (op, cn)))
    decompose_mem_address (&ad, op);
  else if (GET_CODE (op) == SUBREG
	   && MEM_P (SUBREG_REG (op)))
    decompose_mem_address (&ad, SUBREG_REG (op));
  else
    return false;
  /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
     index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
     when INDEX_REG_CLASS is a single register class.  */
  if (ad.base_term != NULL
      && ad.index_term != NULL
      && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
      && REG_P (*ad.base_term)
      && REG_P (*ad.index_term)
      && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
      && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
    {
      std::swap (ad.base, ad.index);
      std::swap (ad.base_term, ad.index_term);
    }
  if (! check_only_p)
    change_p = equiv_address_substitution (&ad);
  /* Reload the base register if it needs a different class; for an
     autoincrement base that does not die in this insn the updated
     value must also be copied back, hence the AFTER list.  */
  if (ad.base_term != NULL
      && (process_addr_reg
	  (ad.base_term, check_only_p, before,
	   (ad.autoinc_p
	    && !(REG_P (*ad.base_term)
		 && find_regno_note (curr_insn, REG_DEAD,
				     REGNO (*ad.base_term)) != NULL_RTX)
	    ? after : NULL),
	   base_reg_class (ad.mode, ad.as, ad.base_outer_code,
			   get_index_code (&ad)))))
    {
      change_p = true;
      if (ad.base_term2 != NULL)
	*ad.base_term2 = *ad.base_term;
    }
  if (ad.index_term != NULL
      && process_addr_reg (ad.index_term, check_only_p,
			   before, NULL, INDEX_REG_CLASS))
    change_p = true;

  /* Target hooks sometimes don't treat extra-constraint addresses as
     legitimate address_operands, so handle them specially.  */
  if (insn_extra_address_constraint (cn)
      && satisfies_address_constraint_p (&ad, cn))
    return change_p;

  if (check_only_p)
    return change_p;

  /* There are four cases where the shape of *AD.INNER may now be invalid:

     1) the original address was valid, but either elimination or
     equiv_address_substitution was applied and that made
     the address invalid.

     2) the address is an invalid symbolic address created by
     force_const_to_mem.

     3) the address is a frame address with an invalid offset.

     4) the address is a frame address with an invalid base.

     All these cases involve a non-autoinc address, so there is no
     point revalidating other types.  */
  if (ad.autoinc_p || valid_address_p (&ad))
    return change_p;

  /* Any index existed before LRA started, so we can assume that the
     presence and shape of the index is valid.  */
  push_to_sequence (*before);
  lra_assert (ad.disp == ad.disp_term);
  if (ad.base == NULL)
    {
      if (ad.index == NULL)
	{
	  /* The address is a bare symbolic/constant expression.  */
	  rtx_insn *insn;
	  rtx_insn *last = get_last_insn ();
	  int code = -1;
	  enum reg_class cl = base_reg_class (ad.mode, ad.as,
					      SCRATCH, SCRATCH);
	  rtx addr = *ad.inner;

	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
	  if (HAVE_lo_sum)
	    {
	      /* addr => lo_sum (new_base, addr), case (2) above.  */
	      insn = emit_insn (gen_rtx_SET
				(new_reg,
				 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
	      code = recog_memoized (insn);
	      if (code >= 0)
		{
		  *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
		  if (! valid_address_p (ad.mode, *ad.outer, ad.as))
		    {
		      /* Try to put lo_sum into register.  */
		      insn = emit_insn (gen_rtx_SET
					(new_reg,
					 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
		      code = recog_memoized (insn);
		      if (code >= 0)
			{
			  *ad.inner = new_reg;
			  if (! valid_address_p (ad.mode, *ad.outer, ad.as))
			    {
			      /* Neither form works: restore the
				 original address and fall through to
				 the plain-move strategy below.  */
			      *ad.inner = addr;
			      code = -1;
			    }
			}

		    }
		}
	      if (code < 0)
		delete_insns_since (last);
	    }

	  if (code < 0)
	    {
	      /* addr => new_base, case (2) above.  */
	      lra_emit_move (new_reg, addr);

	      /* Verify every insn emitted by the move is recognized.  */
	      for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
		   insn != NULL_RTX;
		   insn = NEXT_INSN (insn))
		if (recog_memoized (insn) < 0)
		  break;
	      if (insn != NULL_RTX)
		{
		  /* Do nothing if we cannot generate right insns.
		     This is analogous to reload pass behavior.  */
		  delete_insns_since (last);
		  end_sequence ();
		  return false;
		}
	      *ad.inner = new_reg;
	    }
	}
      else
	{
	  /* index * scale + disp => new base + index * scale,
	     case (1) above.  */
	  enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
					      GET_CODE (*ad.index));

	  lra_assert (INDEX_REG_CLASS != NO_REGS);
	  new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
	  lra_emit_move (new_reg, *ad.disp);
	  *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
					   new_reg, *ad.index);
	}
    }
  else if (ad.index == NULL)
    {
      int regno;
      enum reg_class cl;
      rtx set;
      rtx_insn *insns, *last_insn;
      /* Try to reload base into register only if the base is invalid
         for the address but with valid offset, case (4) above.  */
      start_sequence ();
      new_reg = base_to_reg (&ad);

      /* base + disp => new base, cases (1) and (3) above.  */
      /* Another option would be to reload the displacement into an
	 index register.  However, postreload has code to optimize
	 address reloads that have the same base and different
	 displacements, so reloading into an index register would
	 not necessarily be a win.  */
      if (new_reg == NULL_RTX)
	{
	  /* See if the target can split the displacement into a
	     legitimate new displacement from a local anchor.  */
	  gcc_assert (ad.disp == ad.disp_term);
	  poly_int64 orig_offset;
	  rtx offset1, offset2;
	  if (poly_int_rtx_p (*ad.disp, &orig_offset)
	      && targetm.legitimize_address_displacement (&offset1, &offset2,
							  orig_offset,
							  ad.mode))
	    {
	      new_reg = base_plus_disp_to_reg (&ad, offset1);
	      new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
	    }
	  else
	    new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
	}
      insns = get_insns ();
      last_insn = get_last_insn ();
      /* If we generated at least two insns, try last insn source as
	 an address.  If we succeed, we generate one less insn.  */
      if (REG_P (new_reg)
	  && last_insn != insns
	  && (set = single_set (last_insn)) != NULL_RTX
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && REG_P (XEXP (SET_SRC (set), 0))
	  && CONSTANT_P (XEXP (SET_SRC (set), 1)))
	{
	  *ad.inner = SET_SRC (set);
	  if (valid_address_p (ad.mode, *ad.outer, ad.as))
	    {
	      *ad.base_term = XEXP (SET_SRC (set), 0);
	      *ad.disp_term = XEXP (SET_SRC (set), 1);
	      cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
				   get_index_code (&ad));
	      regno = REGNO (*ad.base_term);
	      if (regno >= FIRST_PSEUDO_REGISTER
		  && cl != lra_get_allocno_class (regno))
		lra_change_class (regno, cl, " Change to", true);
	      new_reg = SET_SRC (set);
	      delete_insns_since (PREV_INSN (last_insn));
	    }
	}
      end_sequence ();
      emit_insn (insns);
      *ad.inner = new_reg;
    }
  else if (ad.disp_term != NULL)
    {
      /* base + scale * index + disp => new base + scale * index,
	 case (1) above.  */
      gcc_assert (ad.disp == ad.disp_term);
      new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       new_reg, *ad.index);
    }
  else if ((scale = get_index_scale (&ad)) == 1)
    {
      /* The last transformation to one reg will be made in
	 curr_insn_transform function.  */
      end_sequence ();
      return false;
    }
  else if (scale != 0)
    {
      /* base + scale * index => base + new_reg,
	 case (1) above.
      Index part of address may become invalid.  For example, we
      changed pseudo on the equivalent memory and a subreg of the
      pseudo onto the memory of different mode for which the scale is
      prohibited.  */
      new_reg = index_part_to_reg (&ad);
      *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
				       *ad.base_term, new_reg);
    }
  else
    {
      /* Unknown (zero) scale: reload the whole inner address.  */
      enum reg_class cl = base_reg_class (ad.mode, ad.as,
					  SCRATCH, SCRATCH);
      rtx addr = *ad.inner;

      new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
      /* addr => new_base.  */
      lra_emit_move (new_reg, addr);
      *ad.inner = new_reg;
    }
  *before = get_insns ();
  end_sequence ();
  return true;
}
3580
497ba60f 3581/* If CHECK_ONLY_P is false, do address reloads until it is necessary.
3582 Use process_address_1 as a helper function. Return true for any
3583 RTL changes.
3584
3585 If CHECK_ONLY_P is true, just check address correctness. Return
3586 false if the address correct. */
dcd5393f 3587static bool
497ba60f 3588process_address (int nop, bool check_only_p,
3589 rtx_insn **before, rtx_insn **after)
dcd5393f 3590{
3591 bool res = false;
3592
497ba60f 3593 while (process_address_1 (nop, check_only_p, before, after))
3594 {
3595 if (check_only_p)
3596 return true;
3597 res = true;
3598 }
dcd5393f 3599 return res;
3600}
3601
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   NEW_RCLASS is the register class for the pseudo created to hold
   the result when one is needed.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   Return pseudo containing the result.  */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  /* True when the modification is an addition, false for subtraction.  */
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is the second operand of
	 the embedded PLUS/MINUS whose first operand must be the
	 modified location itself.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* Simple {PRE,POST}_{INC,DEC}: build the (possibly negated)
	 constant increment.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, GET_MODE (value));
    }

  /* A pre-modification of a register can produce its result directly
     in that register; otherwise a fresh pseudo is needed.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE/POST)_{DEC/INC/MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      /* recog_memoized >= 0 means the emitted add/sub is recognized
	 by the target, so the in-place increment is usable.  */
      if (code >= 0)
	{
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* Otherwise discard the speculatively emitted insns.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  poly_int64 offset;
	  /* If INC is a compile-time constant, undo it by adding its
	     negation; otherwise fall back to a subtract insn.  */
	  if (poly_int_rtx_p (inc, &offset))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-offset,
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3730
ea99c7a1 3731/* Return true if the current move insn does not need processing as we
3732 already know that it satisfies its constraints. */
3733static bool
3734simple_move_p (void)
3735{
3736 rtx dest, src;
3737 enum reg_class dclass, sclass;
3738
3739 lra_assert (curr_insn_set != NULL_RTX);
3740 dest = SET_DEST (curr_insn_set);
3741 src = SET_SRC (curr_insn_set);
532322d3 3742
3743 /* If the instruction has multiple sets we need to process it even if it
3744 is single_set. This can happen if one or more of the SETs are dead.
3745 See PR73650. */
3746 if (multiple_sets (curr_insn))
3747 return false;
3748
ea99c7a1 3749 return ((dclass = get_op_class (dest)) != NO_REGS
3750 && (sclass = get_op_class (src)) != NO_REGS
3751 /* The backend guarantees that register moves of cost 2
3752 never need reloads. */
06d288a6 3753 && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
ea99c7a1 3754 }
3755
c6a6cdaa 3756/* Swap operands NOP and NOP + 1. */
3757static inline void
3758swap_operands (int nop)
3759{
dfcf26a5 3760 std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
3761 std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
3762 std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
003000a4 3763 std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
c6a6cdaa 3764 /* Swap the duplicates too. */
3765 lra_update_dup (curr_id, nop);
3766 lra_update_dup (curr_id, nop + 1);
3767}
3768
/* Main entry point of the constraint code: search the body of the
   current insn to choose the best alternative.  It is mimicking insn
   alternative cost calculation model of former reload pass.  That is
   because machine descriptions were written to use this model.  This
   model can be changed in future.  Make commutative operand exchange
   if it is chosen.

   if CHECK_ONLY_P is false, do RTL changes to satisfy the
   constraints.  Return true if any change happened during function
   call.

   If CHECK_ONLY_P is true then don't do any transformation.  Just
   check that the insn satisfies all constraints.  If the insn does
   not satisfy any constraint, return true.  */
static bool
curr_insn_transform (bool check_only_p)
{
  int i, j, k;
  int n_operands;
  int n_alternatives;
  int n_outputs;
  int commutative;
  signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
  signed char match_inputs[MAX_RECOG_OPERANDS + 1];
  signed char outputs[MAX_RECOG_OPERANDS + 1];
  /* Reload insns to be emitted before/after the current insn.  */
  rtx_insn *before, *after;
  bool alt_p = false;
  /* Flag that the insn has been changed through a transformation.  */
  bool change_p;
  bool sec_mem_p;
  bool use_sec_mem_p;
  int max_regno_before;
  int reused_alternative_num;

  curr_insn_set = single_set (curr_insn);
  if (curr_insn_set != NULL_RTX && simple_move_p ())
    {
      /* We assume that the corresponding insn alternative has no
	 earlier clobbers.  If it is not the case, don't define move
	 cost equal to 2 for the corresponding register classes.  */
      lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
      return false;
    }

  no_input_reloads_p = no_output_reloads_p = false;
  goal_alt_number = -1;
  change_p = sec_mem_p = false;
  /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
     reloads; neither are insns that SET cc0.  Insns that use CC0 are
     not allowed to have any input reloads.  */
  if (JUMP_P (curr_insn) || CALL_P (curr_insn))
    no_output_reloads_p = true;

  if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
    no_input_reloads_p = true;
  if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (curr_insn)))
    no_output_reloads_p = true;

  n_operands = curr_static_id->n_operands;
  n_alternatives = curr_static_id->n_alternatives;

  /* Just return "no reloads" if insn has no operands with
     constraints.  */
  if (n_operands == 0 || n_alternatives == 0)
    return false;

  max_regno_before = max_reg_num ();

  for (i = 0; i < n_operands; i++)
    {
      goal_alt_matched[i][0] = -1;
      goal_alt_matches[i] = -1;
    }

  commutative = curr_static_id->commutative;

  /* Now see what we need for pseudos that didn't get hard regs or got
     the wrong kind of hard reg.  For this, we must consider all the
     operands together against the register constraints.  */

  best_losers = best_overall = INT_MAX;
  best_reload_sum = 0;

  curr_swapped = false;
  goal_alt_swapped = false;

  if (! check_only_p)
    /* Make equivalence substitution and memory subreg elimination
       before address processing because an address legitimacy can
       depend on memory mode.  */
    for (i = 0; i < n_operands; i++)
      {
	rtx op, subst, old;
	bool op_change_p = false;

	if (curr_static_id->operand[i].is_operator)
	  continue;

	old = op = *curr_id->operand_loc[i];
	if (GET_CODE (old) == SUBREG)
	  old = SUBREG_REG (old);
	subst = get_equiv_with_elimination (old, curr_insn);
	original_subreg_reg_mode[i] = VOIDmode;
	equiv_substition_p[i] = false;
	if (subst != old)
	  {
	    /* Replace the (subreg of the) pseudo by its equivalence.  */
	    equiv_substition_p[i] = true;
	    subst = copy_rtx (subst);
	    lra_assert (REG_P (old));
	    if (GET_CODE (op) != SUBREG)
	      *curr_id->operand_loc[i] = subst;
	    else
	      {
		SUBREG_REG (op) = subst;
		if (GET_MODE (subst) == VOIDmode)
		  original_subreg_reg_mode[i] = GET_MODE (old);
	      }
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file,
			 "Changing pseudo %d in operand %i of insn %u on equiv ",
			 REGNO (old), i, INSN_UID (curr_insn));
		dump_value_slim (lra_dump_file, subst, 1);
		fprintf (lra_dump_file, "\n");
	      }
	    op_change_p = change_p = true;
	  }
	if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
	  {
	    change_p = true;
	    lra_update_dup (curr_id, i);
	  }
      }

  /* Reload address registers and displacements.  We do it before
     finding an alternative because of memory constraints.  */
  before = after = NULL;
  for (i = 0; i < n_operands; i++)
    if (! curr_static_id->operand[i].is_operator
	&& process_address (i, check_only_p, &before, &after))
      {
	if (check_only_p)
	  return true;
	change_p = true;
	lra_update_dup (curr_id, i);
      }

  if (change_p)
    /* If we've changed the instruction then any alternative that
       we chose previously may no longer be valid.  */
    lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);

  if (! check_only_p && curr_insn_set != NULL_RTX
      && check_and_process_move (&change_p, &sec_mem_p))
    return change_p;

  /* The alternatives are tried a second time with the commutative
     operand pair exchanged; see the loop below the label.  */
 try_swapped:

  reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
  if (lra_dump_file != NULL && reused_alternative_num >= 0)
    fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
	     reused_alternative_num, INSN_UID (curr_insn));

  if (process_alt_operands (reused_alternative_num))
    alt_p = true;

  if (check_only_p)
    return ! alt_p || best_losers != 0;

  /* If insn is commutative (it's safe to exchange a certain pair of
     operands) then we need to try each alternative twice, the second
     time matching those two operands as if we had exchanged them.  To
     do this, really exchange them in operands.

     If we have just tried the alternatives the second time, return
     operands to normal and drop through.  */

  if (reused_alternative_num < 0 && commutative >= 0)
    {
      curr_swapped = !curr_swapped;
      if (curr_swapped)
	{
	  swap_operands (commutative);
	  goto try_swapped;
	}
      else
	swap_operands (commutative);
    }

  if (! alt_p && ! sec_mem_p)
    {
      /* No alternative works with reloads??  */
      if (INSN_CODE (curr_insn) >= 0)
	fatal_insn ("unable to generate reloads for:", curr_insn);
      error_for_asm (curr_insn,
		     "inconsistent operand constraints in an %<asm%>");
      lra_asm_error_p = true;
      /* Avoid further trouble with this insn.  Don't generate use
	 pattern here as we could use the insn SP offset.  */
      lra_set_insn_deleted (curr_insn);
      return true;
    }

  /* If the best alternative is with operands 1 and 2 swapped, swap
     them.  Update the operand numbers of any reloads already
     pushed.  */

  if (goal_alt_swapped)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
		 INSN_UID (curr_insn));

      /* Swap the duplicates too.  */
      swap_operands (commutative);
      change_p = true;
    }

  /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
     too conservatively.  So we use the secondary memory only if there
     is no any alternative without reloads.  */
  use_sec_mem_p = false;
  if (! alt_p)
    use_sec_mem_p = true;
  else if (sec_mem_p)
    {
      for (i = 0; i < n_operands; i++)
	if (! goal_alt_win[i] && ! goal_alt_match_win[i])
	  break;
      use_sec_mem_p = i < n_operands;
    }

  if (use_sec_mem_p)
    {
      /* Reload the move through a stack slot (secondary memory).  */
      int in = -1, out = -1;
      rtx new_reg, src, dest, rld;
      machine_mode sec_mode, rld_mode;

      lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
      dest = SET_DEST (curr_insn_set);
      src = SET_SRC (curr_insn_set);
      /* Find the operand (or duplicate) numbers of SRC and DEST.  */
      for (i = 0; i < n_operands; i++)
	if (*curr_id->operand_loc[i] == dest)
	  out = i;
	else if (*curr_id->operand_loc[i] == src)
	  in = i;
      for (i = 0; i < curr_static_id->n_dups; i++)
	if (out < 0 && *curr_id->dup_loc[i] == dest)
	  out = curr_static_id->dup_num[i];
	else if (in < 0 && *curr_id->dup_loc[i] == src)
	  in = curr_static_id->dup_num[i];
      lra_assert (out >= 0 && in >= 0
		  && curr_static_id->operand[out].type == OP_OUT
		  && curr_static_id->operand[in].type == OP_IN);
      /* RLD is whichever side has the wider (non-partial) mode.  */
      rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
      rld_mode = GET_MODE (rld);
      sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
      new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
				    NO_REGS, "secondary");
      /* If the mode is changed, it should be wider.  */
      lra_assert (!partial_subreg_p (sec_mode, rld_mode));
      if (sec_mode != rld_mode)
	{
	  /* If the target says specifically to use another mode for
	     secondary memory moves we cannot reuse the original
	     insn.  */
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL, after,
				 "Inserting the sec. move");
	  /* We may have non null BEFORE here (e.g. after address
	     processing.  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL, "Changing on");
	  lra_set_insn_deleted (curr_insn);
	}
      else if (dest == rld)
	{
	  /* Store side needs the secondary memory: dest is replaced
	     by the new pseudo and spilled after the insn.  */
	  *curr_id->operand_loc[out] = new_reg;
	  lra_update_dup (curr_id, out);
	  after = emit_spill_move (false, new_reg, dest);
	  lra_process_new_insns (curr_insn, NULL, after,
				 "Inserting the sec. move");
	}
      else
	{
	  /* Load side: src is replaced and filled before the insn.  */
	  *curr_id->operand_loc[in] = new_reg;
	  lra_update_dup (curr_id, in);
	  /* See comments above.  */
	  push_to_sequence (before);
	  before = emit_spill_move (true, new_reg, src);
	  emit_insn (before);
	  before = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, before, NULL,
				 "Inserting the sec. move");
	}
      lra_update_insn_regno_info (curr_insn);
      return true;
    }

  lra_assert (goal_alt_number >= 0);
  lra_set_used_insn_alternative (curr_insn, goal_alt_number);

  if (lra_dump_file != NULL)
    {
      const char *p;

      fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
	       goal_alt_number, INSN_UID (curr_insn));
      for (i = 0; i < n_operands; i++)
	{
	  p = (curr_static_id->operand_alternative
	       [goal_alt_number * n_operands + i].constraint);
	  if (*p == '\0')
	    continue;
	  fprintf (lra_dump_file, " (%d) ", i);
	  for (; *p != '\0' && *p != ',' && *p != '#'; p++)
	    fputc (*p, lra_dump_file);
	}
      if (INSN_CODE (curr_insn) >= 0
	  && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
	fprintf (lra_dump_file, " {%s}", p);
      if (maybe_ne (curr_id->sp_offset, 0))
	{
	  fprintf (lra_dump_file, " (sp_off=");
	  print_dec (curr_id->sp_offset, lra_dump_file);
	  fprintf (lra_dump_file, ")");
	}
      fprintf (lra_dump_file, "\n");
    }

  /* Right now, for any pair of operands I and J that are required to
     match, with J < I, goal_alt_matches[I] is J.  Add I to
     goal_alt_matched[J].  */

  for (i = 0; i < n_operands; i++)
    if ((j = goal_alt_matches[i]) >= 0)
      {
	for (k = 0; goal_alt_matched[j][k] >= 0; k++)
	  ;
	/* We allow matching one output operand and several input
	   operands.  */
	lra_assert (k == 0
		    || (curr_static_id->operand[j].type == OP_OUT
			&& curr_static_id->operand[i].type == OP_IN
			&& (curr_static_id->operand
			    [goal_alt_matched[j][0]].type == OP_IN)));
	goal_alt_matched[j][k] = i;
	goal_alt_matched[j][k + 1] = -1;
      }

  for (i = 0; i < n_operands; i++)
    goal_alt_win[i] |= goal_alt_match_win[i];

  /* Any constants that aren't allowed and can't be reloaded into
     registers are here changed into memory references.  */
  for (i = 0; i < n_operands; i++)
    if (goal_alt_win[i])
      {
	int regno;
	enum reg_class new_class;
	rtx reg = *curr_id->operand_loc[i];

	if (GET_CODE (reg) == SUBREG)
	  reg = SUBREG_REG (reg);

	if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
	  {
	    bool ok_p = in_class_p (reg, goal_alt[i], &new_class);

	    if (new_class != NO_REGS && get_reg_class (regno) != new_class)
	      {
		lra_assert (ok_p);
		lra_change_class (regno, new_class, " Change to", true);
	      }
	  }
      }
    else
      {
	const char *constraint;
	char c;
	rtx op = *curr_id->operand_loc[i];
	rtx subreg = NULL_RTX;
	machine_mode mode = curr_operand_mode[i];

	if (GET_CODE (op) == SUBREG)
	  {
	    subreg = op;
	    op = SUBREG_REG (op);
	    mode = GET_MODE (op);
	  }

	if (CONST_POOL_OK_P (mode, op)
	    && ((targetm.preferred_reload_class
		 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
		|| no_input_reloads_p))
	  {
	    /* Force the constant into the constant pool and use the
	       resulting memory reference instead.  */
	    rtx tem = force_const_mem (mode, op);

	    change_p = true;
	    if (subreg != NULL_RTX)
	      tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));

	    *curr_id->operand_loc[i] = tem;
	    lra_update_dup (curr_id, i);
	    process_address (i, false, &before, &after);

	    /* If the alternative accepts constant pool refs directly
	       there will be no reload needed at all.  */
	    if (subreg != NULL_RTX)
	      continue;
	    /* Skip alternatives before the one requested.  */
	    constraint = (curr_static_id->operand_alternative
			  [goal_alt_number * n_operands + i].constraint);
	    for (;
		 (c = *constraint) && c != ',' && c != '#';
		 constraint += CONSTRAINT_LEN (c, constraint))
	      {
		enum constraint_num cn = lookup_constraint (constraint);
		if ((insn_extra_memory_constraint (cn)
		     || insn_extra_special_memory_constraint (cn))
		    && satisfies_memory_constraint_p (tem, cn))
		  break;
	      }
	    if (c == '\0' || c == ',' || c == '#')
	      continue;

	    goal_alt_win[i] = true;
	  }
      }

  /* Main reload-generation loop: create reload pseudos and move insns
     for every operand that did not "win" as-is.  */
  n_outputs = 0;
  outputs[0] = -1;
  for (i = 0; i < n_operands; i++)
    {
      int regno;
      bool optional_p = false;
      rtx old, new_reg;
      rtx op = *curr_id->operand_loc[i];

      if (goal_alt_win[i])
	{
	  if (goal_alt[i] == NO_REGS
	      && REG_P (op)
	      /* When we assign NO_REGS it means that we will not
		 assign a hard register to the scratch pseudo by
		 assigment pass and the scratch pseudo will be
		 spilled.  Spilled scratch pseudos are transformed
		 back to scratches at the LRA end.  */
	      && lra_former_scratch_operand_p (curr_insn, i)
	      && lra_former_scratch_p (REGNO (op)))
	    {
	      int regno = REGNO (op);
	      lra_change_class (regno, NO_REGS, " Change to", true);
	      if (lra_get_regno_hard_regno (regno) >= 0)
		/* We don't have to mark all insn affected by the
		   spilled pseudo as there is only one such insn, the
		   current one.  */
		reg_renumber[regno] = -1;
	      lra_assert (bitmap_single_bit_set_p
			  (&lra_reg_info[REGNO (op)].insn_bitmap));
	    }
	  /* We can do an optional reload.  If the pseudo got a hard
	     reg, we might improve the code through inheritance.  If
	     it does not get a hard register we coalesce memory/memory
	     moves later.  Ignore move insns to avoid cycling.  */
	  if (! lra_simple_p
	      && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
	      && goal_alt[i] != NO_REGS && REG_P (op)
	      && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
	      && regno < new_regno_start
	      && ! lra_former_scratch_p (regno)
	      && reg_renumber[regno] < 0
	      /* Check that the optional reload pseudo will be able to
		 hold given mode value.  */
	      && ! (prohibited_class_reg_set_mode_p
		    (goal_alt[i], reg_class_contents[goal_alt[i]],
		     PSEUDO_REGNO_MODE (regno)))
	      && (curr_insn_set == NULL_RTX
		  || !((REG_P (SET_SRC (curr_insn_set))
			|| MEM_P (SET_SRC (curr_insn_set))
			|| GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
		       && (REG_P (SET_DEST (curr_insn_set))
			   || MEM_P (SET_DEST (curr_insn_set))
			   || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
	    optional_p = true;
	  else if (goal_alt_matched[i][0] != -1
		   && curr_static_id->operand[i].type == OP_OUT
		   && (curr_static_id->operand_alternative
		       [goal_alt_number * n_operands + i].earlyclobber))
	    {
	      /* Generate reloads for output and matched inputs.  This
		 is the easiest way to avoid creation of non-existing
		 conflicts in lra-lives.c.  */
	      match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
			    &after, TRUE);
	      outputs[n_outputs++] = i;
	      outputs[n_outputs] = -1;
	      continue;
	    }
	  else
	    continue;
	}

      /* Operands that match previous ones have already been handled.  */
      if (goal_alt_matches[i] >= 0)
	continue;

      /* We should not have an operand with a non-offsettable address
	 appearing where an offsettable address will do.  It also may
	 be a case when the address should be special in other words
	 not a general one (e.g. it needs no index reg).  */
      if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
	{
	  enum reg_class rclass;
	  rtx *loc = &XEXP (op, 0);
	  enum rtx_code code = GET_CODE (*loc);

	  push_to_sequence (before);
	  rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
				   MEM, SCRATCH);
	  if (GET_RTX_CLASS (code) == RTX_AUTOINC)
	    new_reg = emit_inc (rclass, *loc, *loc,
				/* This value does not matter for MODIFY.  */
				GET_MODE_SIZE (GET_MODE (op)));
	  else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
				   "offsetable address", &new_reg))
	    {
	      rtx addr = *loc;
	      enum rtx_code code = GET_CODE (addr);

	      if (code == AND && CONST_INT_P (XEXP (addr, 1)))
		/* (and ... (const_int -X)) is used to align to X bytes.  */
		addr = XEXP (*loc, 0);
	      lra_emit_move (new_reg, addr);
	      if (addr != *loc)
		/* Re-apply the alignment masking on the reload reg.  */
		emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
	    }
	  before = get_insns ();
	  end_sequence ();
	  *loc = new_reg;
	  lra_update_dup (curr_id, i);
	}
      else if (goal_alt_matched[i][0] == -1)
	{
	  /* Ordinary (non-matched) operand: reload it into a new
	     pseudo of class goal_alt[i].  */
	  machine_mode mode;
	  rtx reg, *loc;
	  int hard_regno;
	  enum op_type type = curr_static_id->operand[i].type;

	  loc = curr_id->operand_loc[i];
	  mode = curr_operand_mode[i];
	  if (GET_CODE (*loc) == SUBREG)
	    {
	      reg = SUBREG_REG (*loc);
	      poly_int64 byte = SUBREG_BYTE (*loc);
	      if (REG_P (reg)
		  /* Strict_low_part requires reloading the register and not
		     just the subreg.  Likewise for a strict subreg no wider
		     than a word for WORD_REGISTER_OPERATIONS targets.  */
		  && (curr_static_id->operand[i].strict_low
		      || (!paradoxical_subreg_p (mode, GET_MODE (reg))
			  && (hard_regno
			      = get_try_hard_regno (REGNO (reg))) >= 0
			  && (simplify_subreg_regno
			      (hard_regno,
			       GET_MODE (reg), byte, mode) < 0)
			  && (goal_alt[i] == NO_REGS
			      || (simplify_subreg_regno
				  (ira_class_hard_regs[goal_alt[i]][0],
				   GET_MODE (reg), byte, mode) >= 0)))
		      || (partial_subreg_p (mode, GET_MODE (reg))
			  && known_le (GET_MODE_SIZE (GET_MODE (reg)),
				       UNITS_PER_WORD)
			  && WORD_REGISTER_OPERATIONS)))
		{
		  /* An OP_INOUT is required when reloading a subreg of a
		     mode wider than a word to ensure that data beyond the
		     word being reloaded is preserved.  Also automatically
		     ensure that strict_low_part reloads are made into
		     OP_INOUT which should already be true from the backend
		     constraints.  */
		  if (type == OP_OUT
		      && (curr_static_id->operand[i].strict_low
			  || read_modify_subreg_p (*loc)))
		    type = OP_INOUT;
		  loc = &SUBREG_REG (*loc);
		  mode = GET_MODE (*loc);
		}
	    }
	  old = *loc;
	  if (get_reload_reg (type, mode, old, goal_alt[i],
			      loc != curr_id->operand_loc[i], "", &new_reg)
	      && type != OP_OUT)
	    {
	      /* Input (or inout) reload: load the old value before
		 the insn.  */
	      push_to_sequence (before);
	      lra_emit_move (new_reg, old);
	      before = get_insns ();
	      end_sequence ();
	    }
	  *loc = new_reg;
	  if (type != OP_IN
	      && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
	    {
	      /* Output (or inout) reload: store the new value back
		 after the insn, unless the result is unused.  */
	      start_sequence ();
	      lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
	      emit_insn (after);
	      after = get_insns ();
	      end_sequence ();
	      *loc = new_reg;
	    }
	  for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
	    if (goal_alt_dont_inherit_ops[j] == i)
	      {
		lra_set_regno_unique_value (REGNO (new_reg));
		break;
	      }
	  lra_update_dup (curr_id, i);
	}
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_OUT
		   || (curr_static_id->operand[goal_alt_matched[i][0]].type
		       == OP_INOUT
		       && (operands_match_p
			   (*curr_id->operand_loc[i],
			    *curr_id->operand_loc[goal_alt_matched[i][0]],
			    -1)))))
	{
	  /* generate reloads for input and matched outputs.  */
	  match_inputs[0] = i;
	  match_inputs[1] = -1;
	  match_reload (goal_alt_matched[i][0], match_inputs, outputs,
			goal_alt[i], &before, &after,
			curr_static_id->operand_alternative
			[goal_alt_number * n_operands + goal_alt_matched[i][0]]
			.earlyclobber);
	}
      else if ((curr_static_id->operand[i].type == OP_OUT
		|| (curr_static_id->operand[i].type == OP_INOUT
		    && (operands_match_p
			(*curr_id->operand_loc[i],
			 *curr_id->operand_loc[goal_alt_matched[i][0]],
			 -1))))
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_IN))
	/* Generate reloads for output and matched inputs.  */
	match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
		      &after, curr_static_id->operand_alternative
		      [goal_alt_number * n_operands + i].earlyclobber);
      else if (curr_static_id->operand[i].type == OP_IN
	       && (curr_static_id->operand[goal_alt_matched[i][0]].type
		   == OP_IN))
	{
	  /* Generate reloads for matched inputs.  */
	  match_inputs[0] = i;
	  for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
	    match_inputs[j + 1] = k;
	  match_inputs[j + 1] = -1;
	  match_reload (-1, match_inputs, outputs, goal_alt[i], &before,
			&after, false);
	}
      else
	/* We must generate code in any case when function
	   process_alt_operands decides that it is possible.  */
	gcc_unreachable ();

      /* Memorise processed outputs so that output remaining to be processed
	 can avoid using the same register value (see match_reload).  */
      if (curr_static_id->operand[i].type == OP_OUT)
	{
	  outputs[n_outputs++] = i;
	  outputs[n_outputs] = -1;
	}

      if (optional_p)
	{
	  rtx reg = op;

	  lra_assert (REG_P (reg));
	  regno = REGNO (reg);
	  op = *curr_id->operand_loc[i]; /* Substitution.  */
	  if (GET_CODE (op) == SUBREG)
	    op = SUBREG_REG (op);
	  gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
	  bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
	  /* Remember the original reg so an unused optional reload
	     can be undone later.  */
	  lra_reg_info[REGNO (op)].restore_rtx = reg;
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     " Making reload reg %d for reg %d optional\n",
		     REGNO (op), regno);
	}
    }
  if (before != NULL_RTX || after != NULL_RTX
      || max_regno_before != max_reg_num ())
    change_p = true;
  if (change_p)
    {
      lra_update_operator_dups (curr_id);
      /* Something changes -- process the insn.	 */
      lra_update_insn_regno_info (curr_insn);
    }
  lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
  return change_p;
}
4478
497ba60f 4479/* Return true if INSN satisfies all constraints. In other words, no
4480 reload insns are needed. */
4481bool
4482lra_constrain_insn (rtx_insn *insn)
4483{
4484 int saved_new_regno_start = new_regno_start;
4485 int saved_new_insn_uid_start = new_insn_uid_start;
4486 bool change_p;
4487
4488 curr_insn = insn;
4489 curr_id = lra_get_insn_recog_data (curr_insn);
4490 curr_static_id = curr_id->insn_static_data;
4491 new_insn_uid_start = get_max_uid ();
4492 new_regno_start = max_reg_num ();
4493 change_p = curr_insn_transform (true);
4494 new_regno_start = saved_new_regno_start;
4495 new_insn_uid_start = saved_new_insn_uid_start;
4496 return ! change_p;
4497}
4498
c6a6cdaa 4499/* Return true if X is in LIST. */
4500static bool
4501in_list_p (rtx x, rtx list)
4502{
4503 for (; list != NULL_RTX; list = XEXP (list, 1))
4504 if (XEXP (list, 0) == x)
4505 return true;
4506 return false;
4507}
4508
/* Return true if X contains an allocatable hard register (if
   HARD_REG_P) or a (spilled if SPILLED_P) pseudo.  */
static bool
contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      HARD_REG_SET alloc_regs;

      if (hard_reg_p)
	{
	  /* For a pseudo, consider the hard register it was assigned
	     to, if any.  */
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    regno = lra_get_regno_hard_regno (regno);
	  if (regno < 0)
	    return false;
	  /* Allocatable registers = complement of the non-allocatable
	     set.  */
	  COMPL_HARD_REG_SET (alloc_regs, lra_no_alloc_regs);
	  return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
	}
      else
	{
	  if (regno < FIRST_PSEUDO_REGISTER)
	    return false;
	  if (! spilled_p)
	    return true;
	  /* A spilled pseudo is one without an assigned hard
	     register.  */
	  return lra_get_regno_hard_regno (regno) < 0;
	}
    }
  /* Recursively scan all sub-rtxes of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
	      return true;
	}
    }
  return false;
}
4559
/* Process all regs in location *LOC and change them on equivalent
   substitution.  Return true if any change was done.  Uses the global
   CURR_INSN for elimination of the equivalences.  */
static bool
loc_equivalence_change_p (rtx *loc)
{
  rtx subst, reg, x = *loc;
  bool result = false;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  if (code == SUBREG)
    {
      reg = SUBREG_REG (x);
      /* GET_MODE (subst) == VOIDmode means the equivalence is a
	 constant without a mode, so the subreg can be folded away.  */
      if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
	  && GET_MODE (subst) == VOIDmode)
	{
	  /* We cannot reload debug location.  Simplify subreg here
	     while we know the inner mode.  */
	  *loc = simplify_gen_subreg (GET_MODE (x), subst,
				      GET_MODE (reg), SUBREG_BYTE (x));
	  return true;
	}
    }
  if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
    {
      *loc = subst;
      return true;
    }

  /* Scan all the operand sub-expressions.  Note we keep scanning even
     after a change was found, to substitute in all locations.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	result = loc_equivalence_change_p (&XEXP (x, i)) || result;
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  result
	    = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
    }
  return result;
}
4603
136e5c8e 4604/* Similar to loc_equivalence_change_p, but for use as
61cd3e57 4605 simplify_replace_fn_rtx callback. DATA is insn for which the
4606 elimination is done. If it null we don't do the elimination. */
136e5c8e 4607static rtx
61cd3e57 4608loc_equivalence_callback (rtx loc, const_rtx, void *data)
136e5c8e 4609{
4610 if (!REG_P (loc))
4611 return NULL_RTX;
4612
61cd3e57 4613 rtx subst = (data == NULL
7f836b57 4614 ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
136e5c8e 4615 if (subst != loc)
4616 return subst;
4617
4618 return NULL_RTX;
4619}
4620
c6a6cdaa 4621/* Maximum number of generated reload insns per an insn. It is for
4622 preventing this pass cycling in a bug case. */
4623#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
4624
4625/* The current iteration number of this LRA pass. */
4626int lra_constraint_iter;
4627
c6a6cdaa 4628/* True if we substituted equiv which needs checking register
4629 allocation correctness because the equivalent value contains
4630 allocatable hard registers or when we restore multi-register
4631 pseudo. */
4632bool lra_risky_transformations_p;
4633
4634/* Return true if REGNO is referenced in more than one block. */
4635static bool
4636multi_block_pseudo_p (int regno)
4637{
4638 basic_block bb = NULL;
4639 unsigned int uid;
4640 bitmap_iterator bi;
1a8f8886 4641
c6a6cdaa 4642 if (regno < FIRST_PSEUDO_REGISTER)
4643 return false;
1a8f8886 4644
c6a6cdaa 4645 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4646 if (bb == NULL)
4647 bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
4648 else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
4649 return true;
4650 return false;
4651}
4652
7a438292 4653/* Return true if LIST contains a deleted insn. */
4654static bool
382f116f 4655contains_deleted_insn_p (rtx_insn_list *list)
7a438292 4656{
382f116f 4657 for (; list != NULL_RTX; list = list->next ())
4658 if (NOTE_P (list->insn ())
4659 && NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
7a438292 4660 return true;
4661 return false;
4662}
4663
/* Return true if X contains a pseudo dying in INSN.  */
static bool
dead_pseudo_p (rtx x, rtx_insn *insn)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  /* A register dies in INSN iff INSN carries a REG_DEAD note for
     it.  */
  if (REG_P (x))
    return (insn != NULL_RTX
	    && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
  /* Otherwise recursively walk all sub-rtxes of X.  */
  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dead_pseudo_p (XEXP (x, i), insn))
	    return true;
	}
      else if (fmt[i] == 'E')
	{
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (dead_pseudo_p (XVECEXP (x, i, j), insn))
	      return true;
	}
    }
  return false;
}
4693
4694/* Return true if INSN contains a dying pseudo in INSN right hand
4695 side. */
4696static bool
50fc2d35 4697insn_rhs_dead_pseudo_p (rtx_insn *insn)
c6a6cdaa 4698{
4699 rtx set = single_set (insn);
4700
4701 gcc_assert (set != NULL);
4702 return dead_pseudo_p (SET_SRC (set), insn);
4703}
4704
4705/* Return true if any init insn of REGNO contains a dying pseudo in
4706 insn right hand side. */
4707static bool
4708init_insn_rhs_dead_pseudo_p (int regno)
4709{
382f116f 4710 rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
c6a6cdaa 4711
4712 if (insns == NULL)
4713 return false;
382f116f 4714 for (; insns != NULL_RTX; insns = insns->next ())
4715 if (insn_rhs_dead_pseudo_p (insns->insn ()))
c6a6cdaa 4716 return true;
4717 return false;
4718}
4719
691cfda4 4720/* Return TRUE if REGNO has a reverse equivalence. The equivalence is
4721 reverse only if we have one init insn with given REGNO as a
4722 source. */
4723static bool
4724reverse_equiv_p (int regno)
4725{
382f116f 4726 rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4727 rtx set;
691cfda4 4728
382f116f 4729 if (insns == NULL)
691cfda4 4730 return false;
382f116f 4731 if (! INSN_P (insns->insn ())
4732 || insns->next () != NULL)
691cfda4 4733 return false;
382f116f 4734 if ((set = single_set (insns->insn ())) == NULL_RTX)
691cfda4 4735 return false;
4736 return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
4737}
4738
4739/* Return TRUE if REGNO was reloaded in an equivalence init insn. We
4740 call this function only for non-reverse equivalence. */
4741static bool
4742contains_reloaded_insn_p (int regno)
4743{
4744 rtx set;
382f116f 4745 rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
691cfda4 4746
382f116f 4747 for (; list != NULL; list = list->next ())
4748 if ((set = single_set (list->insn ())) == NULL_RTX
691cfda4 4749 || ! REG_P (SET_DEST (set))
4750 || (int) REGNO (SET_DEST (set)) != regno)
4751 return true;
4752 return false;
4753}
4754
/* Entry function of LRA constraint pass.  Return true if the
   constraint pass did change the code.  */
bool
lra_constraints (bool first_p)
{
  bool changed_p;
  int i, hard_regno, new_insns_num;
  unsigned int min_len, new_min_len, uid;
  rtx set, x, reg, dest_reg;
  basic_block last_bb;
  bitmap_iterator bi;

  lra_constraint_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
	     lra_constraint_iter);
  changed_p = false;
  if (pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    lra_risky_transformations_p = true;
  else
    /* On the first iteration we should check IRA assignment
       correctness.  In rare cases, the assignments can be wrong as
       early clobbers operands are ignored in IRA or usages of
       paradoxical sub-registers are not taken into account by
       IRA.  */
    lra_risky_transformations_p = first_p;
  new_insn_uid_start = get_max_uid ();
  new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
  /* Mark used hard regs for target stack size calculations.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      {
	int j, nregs;

	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
	for (j = 0; j < nregs; j++)
	  df_set_regs_ever_live (hard_regno + j, true);
      }
  /* Do elimination before the equivalence processing as we can spill
     some pseudos during elimination.  */
  lra_eliminate (false, first_p);
  auto_bitmap equiv_insn_bitmap (&reg_obstack);
  /* Decide which pseudo equivalences are still valid/profitable and
     collect the insns mentioning pseudos with a usable equivalence.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0)
      {
	ira_reg_equiv[i].profitable_p = true;
	reg = regno_reg_rtx[i];
	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
	  {
	    bool pseudo_p = contains_reg_p (x, false, false);

	    /* After RTL transformation, we cannot guarantee that
	       pseudo in the substitution was not reloaded which might
	       make equivalence invalid.  For example, in reverse
	       equiv of p0

	       p0 <- ...
	       ...
	       equiv_mem <- p0

	       the memory address register was reloaded before the 2nd
	       insn.  */
	    if ((! first_p && pseudo_p)
		/* We don't use DF for compilation speed sake.  So it
		   is problematic to update live info when we use an
		   equivalence containing pseudos in more than one
		   BB.  */
		|| (pseudo_p && multi_block_pseudo_p (i))
		/* If an init insn was deleted for some reason, cancel
		   the equiv.  We could update the equiv insns after
		   transformations including an equiv insn deletion
		   but it is not worthy as such cases are extremely
		   rare.  */
		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
		/* If it is not a reverse equivalence, we check that a
		   pseudo in rhs of the init insn is not dying in the
		   insn.  Otherwise, the live info at the beginning of
		   the corresponding BB might be wrong after we
		   removed the insn.  When the equiv can be a
		   constant, the right hand side of the init insn can
		   be a pseudo.  */
		|| (! reverse_equiv_p (i)
		    && (init_insn_rhs_dead_pseudo_p (i)
			/* If we reloaded the pseudo in an equivalence
			   init insn, we cannot remove the equiv init
			   insns and the init insns might write into
			   const memory in this case.  */
			|| contains_reloaded_insn_p (i)))
		/* Prevent access beyond equivalent memory for
		   paradoxical subregs.  */
		|| (MEM_P (x)
		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
				 GET_MODE_SIZE (GET_MODE (x))))
		|| (pic_offset_table_rtx
		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
			 && (targetm.preferred_reload_class
			     (x, lra_get_allocno_class (i)) == NO_REGS))
			|| contains_symbol_ref_p (x))))
	      ira_reg_equiv[i].defined_p = false;
	    if (contains_reg_p (x, false, true))
	      ira_reg_equiv[i].profitable_p = false;
	    if (get_equiv (reg) != reg)
	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
	  }
      }
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    update_equiv (i);
  /* We should add all insns containing pseudos which should be
     substituted by their equivalences.  */
  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
    lra_push_insn_by_uid (uid);
  min_len = lra_insn_stack_length ();
  new_insns_num = 0;
  last_bb = NULL;
  changed_p = false;
  /* Main loop: pop insns from the LRA stack and transform them until
     the stack is empty.  NEW_INSNS_NUM guards against cycling.  */
  while ((new_min_len = lra_insn_stack_length ()) != 0)
    {
      curr_insn = lra_pop_insn ();
      --new_min_len;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != last_bb)
	{
	  last_bb = curr_bb;
	  bb_reload_num = lra_curr_reload_num;
	}
      if (min_len > new_min_len)
	{
	  min_len = new_min_len;
	  new_insns_num = 0;
	}
      if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
	internal_error
	  ("Max. number of generated reload insns per insn is achieved (%d)\n",
	   MAX_RELOAD_INSNS_NUMBER);
      new_insns_num++;
      if (DEBUG_INSN_P (curr_insn))
	{
	  /* We need to check equivalence in debug insn and change
	     pseudo to the equivalent value if necessary.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
	    {
	      rtx old = *curr_id->operand_loc[0];
	      *curr_id->operand_loc[0]
		= simplify_replace_fn_rtx (old, NULL_RTX,
					   loc_equivalence_callback, curr_insn);
	      if (old != *curr_id->operand_loc[0])
		{
		  lra_update_insn_regno_info (curr_insn);
		  changed_p = true;
		}
	    }
	}
      else if (INSN_P (curr_insn))
	{
	  if ((set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dest_reg = SET_DEST (set);
	      /* The equivalence pseudo could be set up as SUBREG in a
		 case when it is a call restore insn in a mode
		 different from the pseudo mode.  */
	      if (GET_CODE (dest_reg) == SUBREG)
		dest_reg = SUBREG_REG (dest_reg);
	      if ((REG_P (dest_reg)
		   && (x = get_equiv (dest_reg)) != dest_reg
		   /* Remove insns which set up a pseudo whose value
		      cannot be changed.  Such insns might be not in
		      init_insns because we don't update equiv data
		      during insn transformations.

		      As an example, let suppose that a pseudo got
		      hard register and on the 1st pass was not
		      changed to equivalent constant.  We generate an
		      additional insn setting up the pseudo because of
		      secondary memory movement.  Then the pseudo is
		      spilled and we use the equiv constant.  In this
		      case we should remove the additional insn and
		      this insn is not init_insns list.  */
		   && (! MEM_P (x) || MEM_READONLY_P (x)
		       /* Check that this is actually an insn setting
			  up the equivalence.  */
		       || in_list_p (curr_insn,
				     ira_reg_equiv
				     [REGNO (dest_reg)].init_insns)))
		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
		      && in_list_p (curr_insn,
				    ira_reg_equiv
				    [REGNO (SET_SRC (set))].init_insns)))
		{
		  /* This is equiv init insn of pseudo which did not get a
		     hard register -- remove the insn.  */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file,
			       "      Removing equiv init insn %i (freq=%d)\n",
			       INSN_UID (curr_insn),
			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  if (contains_reg_p (x, true, false))
		    lra_risky_transformations_p = true;
		  lra_set_insn_deleted (curr_insn);
		  continue;
		}
	    }
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  init_curr_insn_input_reloads ();
	  init_curr_operand_mode ();
	  if (curr_insn_transform (false))
	    changed_p = true;
	  /* Check non-transformed insns too for equiv change as USE
	     or CLOBBER don't need reloads but can contain pseudos
	     being changed on their equivalences.  */
	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
	    {
	      lra_update_insn_regno_info (curr_insn);
	      changed_p = true;
	    }
	}
    }

  /* If we used a new hard regno, changed_p should be true because the
     hard reg is assigned to a new pseudo.  */
  if (flag_checking && !changed_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
	if (lra_reg_info[i].nrefs != 0
	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
	  {
	    int j, nregs = hard_regno_nregs (hard_regno,
					     PSEUDO_REGNO_MODE (i));

	    for (j = 0; j < nregs; j++)
	      lra_assert (df_regs_ever_live_p (hard_regno + j));
	  }
    }
  return changed_p;
}
4997
ab4ea053 4998static void initiate_invariants (void);
4999static void finish_invariants (void);
5000
/* Initiate the LRA constraint pass.  It is done once per
   function.  Currently this only sets up the inheritance invariant
   machinery.  */
void
lra_constraints_init (void)
{
  initiate_invariants ();
}
5008
/* Finalize the LRA constraint pass.  It is done once per
   function.  Releases the inheritance invariant machinery set up by
   lra_constraints_init.  */
void
lra_constraints_finish (void)
{
  finish_invariants ();
}
5016
5017\f
5018
/* Structure describes invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};
5029
1cda36f6 5030typedef lra_invariant invariant_t;
ab4ea053 5031typedef invariant_t *invariant_ptr_t;
5032typedef const invariant_t *const_invariant_ptr_t;
5033
5034/* Pointer to the inheritance invariants. */
5035static vec<invariant_ptr_t> invariants;
5036
5037/* Allocation pool for the invariants. */
1cda36f6 5038static object_allocator<lra_invariant> *invariants_pool;
ab4ea053 5039
5040/* Hash table for the invariants. */
5041static htab_t invariant_table;
5042
5043/* Hash function for INVARIANT. */
5044static hashval_t
5045invariant_hash (const void *invariant)
5046{
5047 rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5048 return lra_rtx_hash (inv);
5049}
5050
5051/* Equal function for invariants INVARIANT1 and INVARIANT2. */
5052static int
5053invariant_eq_p (const void *invariant1, const void *invariant2)
5054{
5055 rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5056 rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5057
5058 return rtx_equal_p (inv1, inv2);
5059}
5060
5061/* Insert INVARIANT_RTX into the table if it is not there yet. Return
5062 invariant which is in the table. */
5063static invariant_ptr_t
5064insert_invariant (rtx invariant_rtx)
5065{
5066 void **entry_ptr;
5067 invariant_t invariant;
5068 invariant_ptr_t invariant_ptr;
5069
5070 invariant.invariant_rtx = invariant_rtx;
5071 entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5072 if (*entry_ptr == NULL)
5073 {
5074 invariant_ptr = invariants_pool->allocate ();
5075 invariant_ptr->invariant_rtx = invariant_rtx;
5076 invariant_ptr->insn = NULL;
5077 invariants.safe_push (invariant_ptr);
5078 *entry_ptr = (void *) invariant_ptr;
5079 }
5080 return (invariant_ptr_t) *entry_ptr;
5081}
5082
/* Initiate the invariant table: the vector of invariants, their
   allocation pool, and the hash table keyed by invariant rtx.  */
static void
initiate_invariants (void)
{
  invariants.create (100);
  invariants_pool
    = new object_allocator<lra_invariant> ("Inheritance invariants");
  invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
}
5092
/* Finish the invariant table: free the hash table, the allocation
   pool, and the vector of invariants.  */
static void
finish_invariants (void)
{
  htab_delete (invariant_table);
  delete invariants_pool;
  invariants.release ();
}
5101
/* Make the invariant table empty, keeping the table, pool, and vector
   allocated for reuse.  */
static void
clear_invariants (void)
{
  htab_empty (invariant_table);
  invariants_pool->release ();
  invariants.truncate (0);
}
5110
5111\f
5112
5113/* This page contains code to do inheritance/split
5114 transformations. */
5115
5116/* Number of reloads passed so far in current EBB. */
5117static int reloads_num;
5118
5119/* Number of calls passed so far in current EBB. */
5120static int calls_num;
5121
5122/* Current reload pseudo check for validity of elements in
5123 USAGE_INSNS. */
5124static int curr_usage_insns_check;
5125
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.  */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.  And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
5153
5154/* Map: regno -> corresponding pseudo usage insns. */
5155static struct usage_insns *usage_insns;
5156
5157static void
f9a00e9e 5158setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
c6a6cdaa 5159{
5160 usage_insns[regno].check = curr_usage_insns_check;
5161 usage_insns[regno].insns = insn;
5162 usage_insns[regno].reloads_num = reloads_num;
5163 usage_insns[regno].calls_num = calls_num;
5164 usage_insns[regno].after_p = after_p;
5165}
5166
5167/* The function is used to form list REGNO usages which consists of
5168 optional debug insns finished by a non-debug insn using REGNO.
5169 RELOADS_NUM is current number of reload insns processed so far. */
5170static void
06743455 5171add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
c6a6cdaa 5172{
5173 rtx next_usage_insns;
1a8f8886 5174
c6a6cdaa 5175 if (usage_insns[regno].check == curr_usage_insns_check
5176 && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
5177 && DEBUG_INSN_P (insn))
5178 {
5179 /* Check that we did not add the debug insn yet. */
5180 if (next_usage_insns != insn
5181 && (GET_CODE (next_usage_insns) != INSN_LIST
5182 || XEXP (next_usage_insns, 0) != insn))
5183 usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
5184 next_usage_insns);
5185 }
5186 else if (NONDEBUG_INSN_P (insn))
5187 setup_next_usage_insn (regno, insn, reloads_num, false);
5188 else
5189 usage_insns[regno].check = 0;
5190}
1a8f8886 5191
5bb0e0fd 5192/* Return first non-debug insn in list USAGE_INSNS. */
50fc2d35 5193static rtx_insn *
5bb0e0fd 5194skip_usage_debug_insns (rtx usage_insns)
5195{
5196 rtx insn;
5197
5198 /* Skip debug insns. */
5199 for (insn = usage_insns;
5200 insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
5201 insn = XEXP (insn, 1))
5202 ;
50fc2d35 5203 return safe_as_a <rtx_insn *> (insn);
5bb0e0fd 5204}
5205
5206/* Return true if we need secondary memory moves for insn in
5207 USAGE_INSNS after inserting inherited pseudo of class INHER_CL
5208 into the insn. */
5209static bool
acb7fe1f 5210check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
5211 rtx usage_insns ATTRIBUTE_UNUSED)
5bb0e0fd 5212{
50fc2d35 5213 rtx_insn *insn;
5214 rtx set, dest;
5bb0e0fd 5215 enum reg_class cl;
5216
5217 if (inher_cl == ALL_REGS
5218 || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
5219 return false;
5220 lra_assert (INSN_P (insn));
5221 if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
5222 return false;
5223 dest = SET_DEST (set);
5224 if (! REG_P (dest))
5225 return false;
5226 lra_assert (inher_cl != NO_REGS);
5227 cl = get_reg_class (REGNO (dest));
5228 return (cl != NO_REGS && cl != ALL_REGS
c836e75b 5229 && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
5bb0e0fd 5230}
5231
c6a6cdaa 5232/* Registers involved in inheritance/split in the current EBB
5233 (inheritance/split pseudos and original registers). */
5234static bitmap_head check_only_regs;
5235
/* Reload pseudos cannot be involved in invariant inheritance in the
   current EBB.  */
5238static bitmap_head invalid_invariant_regs;
5239
/* Do inheritance transformations for insn INSN, which defines (if
   DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
   instruction in the EBB next uses ORIGINAL_REGNO; it has the same
   form as the "insns" field of usage_insns.  Return true if we
   succeed in such transformation.

   The transformations look like:

     p <- ...		  i <- ...
     ...		  p <- i    (new insn)
     ...	     =>
     <- ... p ...	  <- ... i ...
   or
     ...		  i <- p    (new insn)
     <- ... p ...	  <- ... i ...
     ...	     =>
     <- ... p ...	  <- ... i ...
   where p is a spilled original pseudo and i is a new inheritance pseudo.


   The inheritance pseudo has the smallest class of two classes CL and
   class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds insns; do not do it when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  /* The inheritance copy must be a single move insn; reject it if the
     emitted sequence turned into more than one insn.  */
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "	>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  /* Remember how to undo the inheritance and mark the participating
     registers for later correctness checking.  */
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Replace ORIGINAL_REGNO with the inheritance pseudo in all the
     recorded subsequent usage insns (debug insns plus the final
     non-debug one).  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "	  >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
5403
 5404/* Return true if we need a caller save/restore for pseudo REGNO which
 5405 was assigned to a hard register. */
 5406static inline bool
 5407need_for_call_save_p (int regno)
 5408{
 5409 lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
 /* A save/restore is needed only if REGNO lives through a call in the
 current EBB (its recorded calls_num is behind the running calls_num)
 and its assigned hard register is actually clobbered by a call:
 either it overlaps the call-used set (refined by IPA-RA info when
 flag_ipa_ra gives a non-empty actual set) or the target reports it
 as partly clobbered by the call. */
 5410 return (usage_insns[regno].calls_num < calls_num
 5411 && (overlaps_hard_reg_set_p
fcf56aaf 5412 ((flag_ipa_ra &&
f2cc6708 5413 ! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
 5414 ? lra_reg_info[regno].actual_call_used_reg_set
 5415 : call_used_reg_set,
a766a8b0 5416 PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
5da94e60 5417 || (targetm.hard_regno_call_part_clobbered
5c62f29a 5418 (lra_reg_info[regno].call_insn,
 5419 reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
c6a6cdaa 5420}
5421
75de4aa2 5422/* Global registers occurring in the current EBB. Tested in
 need_for_split_p to restrict pseudo splitting to EBB-global pseudos. */
c6a6cdaa 5423static bitmap_head ebb_global_regs;
5424
 5425/* Return true if we need a split for hard register REGNO or pseudo
 5426 REGNO which was assigned to a hard register.
 5427 POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
 5428 used for reloads since the EBB end. It is an approximation of the
 5429 used hard registers in the split range. The exact value would
 5430 require expensive calculations. If we were aggressive with
 5431 splitting because of the approximation, the split pseudo will save
 5432 the same hard register assignment and will be removed in the undo
 5433 pass. We still need the approximation because too aggressive
 5434 splitting would result in too inaccurate cost calculation in the
 5435 assignment pass because of too many generated moves which will be
 5436 probably removed in the undo pass. */
 5437static inline bool
 5438need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
 5439{
 5440 int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];
 5441
 5442 lra_assert (hard_regno >= 0);
 5443 return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
 5444 /* Don't split eliminable hard registers, otherwise we can
 5445 split hard registers like hard frame pointer, which
 5446 lives on BB start/end according to DF-infrastructure,
 5447 when there is a pseudo assigned to the register and
 5448 living in the same BB. */
 5449 && (regno >= FIRST_PSEUDO_REGISTER
 5450 || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
 5451 && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
0157439c 5452 /* Don't split call clobbered hard regs living through
 5453 calls, otherwise we might have a check problem in the
 5454 assign sub-pass as in the most cases (exception is a
 5455 situation when lra_risky_transformations_p value is
 5456 true) the assign pass assumes that all pseudos living
 5457 through calls are assigned to call saved hard regs. */
 5458 && (regno >= FIRST_PSEUDO_REGISTER
 5459 || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
 5460 || usage_insns[regno].calls_num == calls_num)
c6a6cdaa 5461 /* We need at least 2 reloads to make pseudo splitting
 5462 profitable. We should provide hard regno splitting in
 5463 any case to solve 1st insn scheduling problem when
 5464 moving hard register definition up might result in
 5465 impossibility to find hard register for reload pseudo of
 5466 small register class. */
 5467 && (usage_insns[regno].reloads_num
7eec3701 5468 + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
c6a6cdaa 5469 && (regno < FIRST_PSEUDO_REGISTER
 5470 /* For short living pseudos, spilling + inheritance can
 5471 be considered a substitution for splitting.
 5472 Therefore we do not split local pseudos. It
 5473 decreases also aggressiveness of splitting. The
 5474 minimal number of references is chosen taking into
 5475 account that for 2 references splitting has no sense
 5476 as we can just spill the pseudo. */
 5477 || (regno >= FIRST_PSEUDO_REGISTER
 5478 && lra_reg_info[regno].nrefs > 3
 5479 && bitmap_bit_p (&ebb_global_regs, regno))))
 5480 || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
 5481}
5482
 5483/* Return class for the split pseudo created from original pseudo with
 5484 ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
 5485 choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
 5486 results in no secondary memory movements. Return NO_REGS if no
 such subclass exists. */
 5487static enum reg_class
 5488choose_split_class (enum reg_class allocno_class,
 5489 int hard_regno ATTRIBUTE_UNUSED,
3754d046 5490 machine_mode mode ATTRIBUTE_UNUSED)
c6a6cdaa 5491{
c6a6cdaa 5492 int i;
 5493 enum reg_class cl, best_cl = NO_REGS;
d810a474 5494 enum reg_class hard_reg_class ATTRIBUTE_UNUSED
 5495 = REGNO_REG_CLASS (hard_regno);
1a8f8886 5496
 /* Fast path: the allocno class itself works if it needs no secondary
 memory for moves within itself and contains HARD_REGNO. */
c836e75b 5497 if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
c6a6cdaa 5498 && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
 5499 return allocno_class;
 /* Otherwise pick the largest (by hard reg count) subclass that avoids
 secondary memory in both move directions and contains HARD_REGNO. */
 5500 for (i = 0;
 5501 (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
 5502 i++)
c836e75b 5503 if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
 5504 && ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
c6a6cdaa 5505 && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
 5506 && (best_cl == NO_REGS
 5507 || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
 5508 best_cl = cl;
 5509 return best_cl;
c6a6cdaa 5510}
5511
cc39a634 5512/* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO.
 5513 It only makes sense to call this function if NEW_REGNO is always
 5514 equal to ORIGINAL_REGNO. Does nothing when ORIGINAL_REGNO has no
 defined equivalence. */
 5515
 5516static void
 5517lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno)
 5518{
 5519 if (!ira_reg_equiv[original_regno].defined_p)
 5520 return;
 5521
 /* Make sure ira_reg_equiv is large enough for NEW_REGNO before
 writing the copied memory/constant/invariant equivalences. */
 5522 ira_expand_reg_equiv ();
 5523 ira_reg_equiv[new_regno].defined_p = true;
 5524 if (ira_reg_equiv[original_regno].memory)
 5525 ira_reg_equiv[new_regno].memory
 5526 = copy_rtx (ira_reg_equiv[original_regno].memory);
 5527 if (ira_reg_equiv[original_regno].constant)
 5528 ira_reg_equiv[new_regno].constant
 5529 = copy_rtx (ira_reg_equiv[original_regno].constant);
 5530 if (ira_reg_equiv[original_regno].invariant)
 5531 ira_reg_equiv[new_regno].invariant
 5532 = copy_rtx (ira_reg_equiv[original_regno].invariant);
 5533}
5534
c6a6cdaa 5535/* Do split transformations for insn INSN, which defines or uses
 5536 ORIGINAL_REGNO. NEXT_USAGE_INSNS specifies which instruction in
 5537 the EBB next uses ORIGINAL_REGNO; it has the same form as the
6a4bc24e 5538 "insns" field of usage_insns. If TO is not NULL, we don't use
7fe7987e 5539 usage_insns, we put restore insns after TO insn. It is a case when
 5540 we call it from lra_split_hard_reg_for, outside the inheritance
 5541 pass.
c6a6cdaa 5542
 5543 The transformations look like:
 5544
 5545 p <- ... p <- ...
 5546 ... s <- p (new insn -- save)
 5547 ... =>
 5548 ... p <- s (new insn -- restore)
 5549 <- ... p ... <- ... p ...
 5550 or
 5551 <- ... p ... <- ... p ...
 5552 ... s <- p (new insn -- save)
 5553 ... =>
 5554 ... p <- s (new insn -- restore)
 5555 <- ... p ... <- ... p ...
 5556
 5557 where p is an original pseudo that got a hard register or a hard
 5558 register and s is a new split pseudo. The save is put before INSN
 5559 if BEFORE_P is true. Return true if we succeed in such
 5560 transformation. */
 5561static bool
7f836b57 5562split_reg (bool before_p, int original_regno, rtx_insn *insn,
6a4bc24e 5563 rtx next_usage_insns, rtx_insn *to)
c6a6cdaa 5564{
 5565 enum reg_class rclass;
 5566 rtx original_reg;
 5567 int hard_regno, nregs;
7f836b57 5568 rtx new_reg, usage_insn;
 5569 rtx_insn *restore, *save;
c6a6cdaa 5570 bool after_p;
 5571 bool call_save_p;
e947f9c3 5572 machine_mode mode;
c6a6cdaa 5573
 /* Set up hard_regno/rclass/mode/original_reg for the two cases:
 splitting a hard register directly vs. a pseudo with an assigned
 hard register (only the latter may need a caller save). */
 5574 if (original_regno < FIRST_PSEUDO_REGISTER)
 5575 {
 5576 rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
 5577 hard_regno = original_regno;
 5578 call_save_p = false;
74855d08 5579 nregs = 1;
e947f9c3 5580 mode = lra_reg_info[hard_regno].biggest_mode;
 5581 machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
e7142ce1 5582 /* A reg can have a biggest_mode of VOIDmode if it was only ever seen
 5583 as part of a multi-word register. In that case, or if the biggest
 5584 mode was larger than a register, just use the reg_rtx. Otherwise,
 5585 limit the size to that of the biggest access in the function. */
 5586 if (mode == VOIDmode
d0257d43 5587 || paradoxical_subreg_p (mode, reg_rtx_mode))
e947f9c3 5588 {
 5589 original_reg = regno_reg_rtx[hard_regno];
 5590 mode = reg_rtx_mode;
 5591 }
 5592 else
 5593 original_reg = gen_rtx_REG (mode, hard_regno);
c6a6cdaa 5594 }
 5595 else
 5596 {
e947f9c3 5597 mode = PSEUDO_REGNO_MODE (original_regno);
c6a6cdaa 5598 hard_regno = reg_renumber[original_regno];
92d2aec3 5599 nregs = hard_regno_nregs (hard_regno, mode);
c6a6cdaa 5600 rclass = lra_get_allocno_class (original_regno);
 5601 original_reg = regno_reg_rtx[original_regno];
 5602 call_save_p = need_for_call_save_p (original_regno);
 5603 }
c6a6cdaa 5604 lra_assert (hard_regno >= 0);
 5605 if (lra_dump_file != NULL)
 5606 fprintf (lra_dump_file,
 5607 " ((((((((((((((((((((((((((((((((((((((((((((((((\n");
e947f9c3 5608
c6a6cdaa 5609 if (call_save_p)
 5610 {
 /* For a caller save, the new pseudo gets NO_REGS so it is spilled
 to memory; the target may pick a wider caller-save mode. */
34575461 5611 mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
92d2aec3 5612 hard_regno_nregs (hard_regno, mode),
34575461 5613 mode);
 5614 new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
c6a6cdaa 5615 }
 5616 else
 5617 {
e947f9c3 5618 rclass = choose_split_class (rclass, hard_regno, mode);
c6a6cdaa 5619 if (rclass == NO_REGS)
 5620 {
 5621 if (lra_dump_file != NULL)
 5622 {
 5623 fprintf (lra_dump_file,
 5624 " Rejecting split of %d(%s): "
 5625 "no good reg class for %d(%s)\n",
 5626 original_regno,
 5627 reg_class_names[lra_get_allocno_class (original_regno)],
 5628 hard_regno,
 5629 reg_class_names[REGNO_REG_CLASS (hard_regno)]);
 5630 fprintf
 5631 (lra_dump_file,
 5632 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
 5633 }
 5634 return false;
 5635 }
4f031018 5636 /* Split_if_necessary can split hard registers used as part of a
 5637 multi-register mode but splits each register individually. The
 5638 mode used for each independent register may not be supported
 5639 so reject the split. Splitting the wider mode should theoretically
 5640 be possible but is not implemented. */
b395382f 5641 if (!targetm.hard_regno_mode_ok (hard_regno, mode))
4f031018 5642 {
 5643 if (lra_dump_file != NULL)
 5644 {
 5645 fprintf (lra_dump_file,
 5646 " Rejecting split of %d(%s): unsuitable mode %s\n",
 5647 original_regno,
 5648 reg_class_names[lra_get_allocno_class (original_regno)],
 5649 GET_MODE_NAME (mode));
 5650 fprintf
 5651 (lra_dump_file,
 5652 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
 5653 }
 5654 return false;
 5655 }
e947f9c3 5656 new_reg = lra_create_new_reg (mode, original_reg, rclass, "split");
c6a6cdaa 5657 reg_renumber[REGNO (new_reg)] = hard_regno;
 5658 }
cc39a634 5659 int new_regno = REGNO (new_reg);
 /* Reject the split if the save or restore move needs more than one
 insn (unless this is a call save, where multi-insn sequences are
 acceptable). */
c6a6cdaa 5660 save = emit_spill_move (true, new_reg, original_reg);
52793acd 5661 if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
c6a6cdaa 5662 {
c6a6cdaa 5663 if (lra_dump_file != NULL)
 5664 {
 5665 fprintf
 5666 (lra_dump_file,
52793acd 5667 " Rejecting split %d->%d resulting in > 2 save insns:\n",
cc39a634 5668 original_regno, new_regno);
4cd001d5 5669 dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
c6a6cdaa 5670 fprintf (lra_dump_file,
 5671 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
 5672 }
 5673 return false;
 5674 }
 5675 restore = emit_spill_move (false, new_reg, original_reg);
52793acd 5676 if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
c6a6cdaa 5677 {
c6a6cdaa 5678 if (lra_dump_file != NULL)
 5679 {
 5680 fprintf (lra_dump_file,
 5681 " Rejecting split %d->%d "
52793acd 5682 "resulting in > 2 restore insns:\n",
cc39a634 5683 original_regno, new_regno);
4cd001d5 5684 dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
c6a6cdaa 5685 fprintf (lra_dump_file,
 5686 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
 5687 }
 5688 return false;
 5689 }
cc39a634 5690 /* Transfer equivalence information to the spill register, so that
 5691 if we fail to allocate the spill register, we have the option of
 5692 rematerializing the original value instead of spilling to the stack. */
 5693 if (!HARD_REGISTER_NUM_P (original_regno)
 5694 && mode == PSEUDO_REGNO_MODE (original_regno))
 5695 lra_copy_reg_equiv (new_regno, original_regno);
cc39a634 5696 lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
cc39a634 5697 bitmap_set_bit (&lra_split_regs, new_regno);
6a4bc24e 5698 if (to != NULL)
c6a6cdaa 5699 {
7fe7987e 5700 lra_assert (next_usage_insns == NULL);
6a4bc24e 5701 usage_insn = to;
 5702 after_p = TRUE;
 5703 }
 5704 else
 5705 {
7fe7987e 5706 /* We need check_only_regs only inside the inheritance pass. */
 5707 bitmap_set_bit (&check_only_regs, new_regno);
 5708 bitmap_set_bit (&check_only_regs, original_regno);
6a4bc24e 5709 after_p = usage_insns[original_regno].after_p;
 /* Walk the usage list: rewrite debug insns in place and stop at
 the first non-debug usage, which receives the restore. */
 5710 for (;;)
b12c2c48 5711 {
6a4bc24e 5712 if (GET_CODE (next_usage_insns) != INSN_LIST)
 5713 {
 5714 usage_insn = next_usage_insns;
 5715 break;
 5716 }
 5717 usage_insn = XEXP (next_usage_insns, 0);
 5718 lra_assert (DEBUG_INSN_P (usage_insn));
 5719 next_usage_insns = XEXP (next_usage_insns, 1);
 5720 lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
 5721 true);
 5722 lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
 5723 if (lra_dump_file != NULL)
 5724 {
 5725 fprintf (lra_dump_file, " Split reuse change %d->%d:\n",
 5726 original_regno, new_regno);
 5727 dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
 5728 }
c6a6cdaa 5729 }
 5730 }
 5731 lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
 5732 lra_assert (usage_insn != insn || (after_p && before_p));
7f836b57 5733 lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
 5734 after_p ? NULL : restore,
 5735 after_p ? restore : NULL,
c6a6cdaa 5736 call_save_p
 5737 ? "Add reg<-save" : "Add reg<-split");
7f836b57 5738 lra_process_new_insns (insn, before_p ? save : NULL,
 5739 before_p ? NULL : save,
c6a6cdaa 5740 call_save_p
 5741 ? "Add save<-reg" : "Add split<-reg");
74855d08 5742 if (nregs > 1)
 5743 /* If we are trying to split a multi-register value, we should
 5744 check conflicts on the next assignment sub-pass. IRA can
 5745 allocate on sub-register levels, LRA does this on pseudos level
 5746 right now and this discrepancy may create allocation conflicts
 5747 after splitting. */
 5748 lra_risky_transformations_p = true;
c6a6cdaa 5749 if (lra_dump_file != NULL)
 5750 fprintf (lra_dump_file,
 5751 " ))))))))))))))))))))))))))))))))))))))))))))))))\n");
 5752 return true;
 5753}
5754
6a4bc24e 5755/* Split a hard reg for reload pseudo REGNO having RCLASS and living
 5756 in the range [FROM, TO]. Return true if we did a split.
 5757 Otherwise, return false. */
 5758bool
 5759spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
 5760{
 5761 int i, hard_regno;
 5762 int rclass_size;
 5763 rtx_insn *insn;
eaefe34f 5764 unsigned int uid;
 5765 bitmap_iterator bi;
 5766 HARD_REG_SET ignore;
6a4bc24e 5767
 5768 lra_assert (from != NULL && to != NULL);
 /* Collect hard regs mentioned (explicitly or as insn hard-reg
 operands) by insns using REGNO; those cannot be taken over. */
eaefe34f 5769 CLEAR_HARD_REG_SET (ignore);
 5770 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
 5771 {
 5772 lra_insn_recog_data_t id = lra_insn_recog_data[uid];
 5773 struct lra_static_insn_data *static_id = id->insn_static_data;
 5774 struct lra_insn_reg *reg;
 5775
 5776 for (reg = id->regs; reg != NULL; reg = reg->next)
c1031b5a 5777 if (reg->regno < FIRST_PSEUDO_REGISTER)
eaefe34f 5778 SET_HARD_REG_BIT (ignore, reg->regno);
 5779 for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
 5780 SET_HARD_REG_BIT (ignore, reg->regno);
 5781 }
 /* Try each conflicting hard reg of RCLASS; it is usable only if no
 insn in [FROM, TO] mentions it. */
6a4bc24e 5782 rclass_size = ira_class_hard_regs_num[rclass];
 5783 for (i = 0; i < rclass_size; i++)
 5784 {
 5785 hard_regno = ira_class_hard_regs[rclass][i];
eaefe34f 5786 if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
 5787 || TEST_HARD_REG_BIT (ignore, hard_regno))
6a4bc24e 5788 continue;
 5789 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
730ba3b8 5790 {
6f7735c6 5791 struct lra_static_insn_data *static_id;
730ba3b8 5792 struct lra_insn_reg *reg;
 5793
6f7735c6 5794 if (!INSN_P (insn))
 5795 continue;
 5796 if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
 5797 INSN_UID (insn)))
730ba3b8 5798 break;
6f7735c6 5799 static_id = lra_get_insn_recog_data (insn)->insn_static_data;
730ba3b8 5800 for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
 5801 if (reg->regno == hard_regno)
 5802 break;
 5803 if (reg != NULL)
 5804 break;
 5805 }
6a4bc24e 5806 if (insn != NEXT_INSN (to))
 5807 continue;
 5808 if (split_reg (TRUE, hard_regno, from, NULL, to))
 5809 return true;
 5810 }
 5811 return false;
 5812}
5813
c6a6cdaa 5814/* Recognize that we need a split transformation for insn INSN, which
 5815 defines or uses REGNO in its insn biggest MODE (we use it only if
 5816 REGNO is a hard register). POTENTIAL_RELOAD_HARD_REGS contains
 5817 hard registers which might be used for reloads since the EBB end.
 5818 Put the save before INSN if BEFORE_P is true. MAX_UID is maximal
 5819 uid before starting INSN processing. Return true if we succeed in
 5820 such transformation. */
 5821static bool
3754d046 5822split_if_necessary (int regno, machine_mode mode,
c6a6cdaa 5823 HARD_REG_SET potential_reload_hard_regs,
7f836b57 5824 bool before_p, rtx_insn *insn, int max_uid)
c6a6cdaa 5825{
 5826 bool res = false;
 5827 int i, nregs = 1;
 5828 rtx next_usage_insns;
 5829
 /* A hard register in a multi-word mode is split one word register at
 a time. */
 5830 if (regno < FIRST_PSEUDO_REGISTER)
92d2aec3 5831 nregs = hard_regno_nregs (regno, mode);
c6a6cdaa 5832 for (i = 0; i < nregs; i++)
 5833 if (usage_insns[regno + i].check == curr_usage_insns_check
 5834 && (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
 5835 /* To avoid processing the register twice or more. */
 5836 && ((GET_CODE (next_usage_insns) != INSN_LIST
 5837 && INSN_UID (next_usage_insns) < max_uid)
 5838 || (GET_CODE (next_usage_insns) == INSN_LIST
 5839 && (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
 5840 && need_for_split_p (potential_reload_hard_regs, regno + i)
6a4bc24e 5841 && split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
c6a6cdaa 5842 res = true;
 5843 return res;
 5844}
5845
ab4ea053 5846/* Return TRUE if rtx X is considered as an invariant for
 5847 inheritance: no side effects, no MEM, and any hard register in it
 is fixed, non-eliminable, not the stack pointer, not CC, and not
 currently invalidated. */
 5848static bool
 5849invariant_p (const_rtx x)
 5850{
 5851 machine_mode mode;
 5852 const char *fmt;
 5853 enum rtx_code code;
 5854 int i, j;
 5855
00d7c794 5856 if (side_effects_p (x))
 5857 return false;
 5858
ab4ea053 5859 code = GET_CODE (x);
 5860 mode = GET_MODE (x);
 5861 if (code == SUBREG)
 5862 {
 /* Look through the SUBREG but keep the wider of the two modes for
 the hard register width check below. */
 5863 x = SUBREG_REG (x);
 5864 code = GET_CODE (x);
081c1d32 5865 mode = wider_subreg_mode (mode, GET_MODE (x));
ab4ea053 5866 }
 5867
 5868 if (MEM_P (x))
 5869 return false;
 5870
 5871 if (REG_P (x))
 5872 {
 5873 int i, nregs, regno = REGNO (x);
 5874
 5875 if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
 5876 || TEST_HARD_REG_BIT (eliminable_regset, regno)
 5877 || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
 5878 return false;
92d2aec3 5879 nregs = hard_regno_nregs (regno, mode);
ab4ea053 5880 for (i = 0; i < nregs; i++)
 5881 if (! fixed_regs[regno + i]
 5882 /* A hard register may be clobbered in the current insn
 5883 but we can ignore this case because if the hard
 5884 register is used it should be set somewhere after the
 5885 clobber. */
 5886 || bitmap_bit_p (&invalid_invariant_regs, regno + i))
 5887 return false;
 5888 }
 /* Recurse over all sub-rtxes; the whole expression is invariant only
 if every operand is. */
 5889 fmt = GET_RTX_FORMAT (code);
 5890 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
 5891 {
 5892 if (fmt[i] == 'e')
 5893 {
 5894 if (! invariant_p (XEXP (x, i)))
 5895 return false;
 5896 }
 5897 else if (fmt[i] == 'E')
 5898 {
 5899 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
 5900 if (! invariant_p (XVECEXP (x, i, j)))
 5901 return false;
 5902 }
 5903 }
 5904 return true;
 5905}
5906
 5907/* We have 'dest_reg <- invariant'. Let us try to make an invariant
 5908 inheritance transformation (using dest_reg instead of the invariant
 5909 in a subsequent insn). Return true on success. */
 5910static bool
 5911process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
 5912{
 5913 invariant_ptr_t invariant_ptr;
 5914 rtx_insn *insn, *new_insns;
 5915 rtx insn_set, insn_reg, new_reg;
 5916 int insn_regno;
 5917 bool succ_p = false;
 5918 int dst_regno = REGNO (dst_reg);
582adad1 5919 machine_mode dst_mode = GET_MODE (dst_reg);
ab4ea053 5920 enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;
 5921
 5922 invariant_ptr = insert_invariant (invariant_rtx);
 5923 if ((insn = invariant_ptr->insn) != NULL_RTX)
 5924 {
 5925 /* We have a subsequent insn using the invariant. */
 5926 insn_set = single_set (insn);
 5927 lra_assert (insn_set != NULL);
 5928 insn_reg = SET_DEST (insn_set);
 5929 lra_assert (REG_P (insn_reg));
 5930 insn_regno = REGNO (insn_reg);
 5931 insn_reg_cl = lra_get_allocno_class (insn_regno);
 5932
 5933 if (dst_mode == GET_MODE (insn_reg)
 5934 /* We should consider only result move reg insns which are
 5935 cheap. */
 5936 && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
 5937 && targetm.register_move_cost (dst_mode, cl, cl) == 2)
 5938 {
 5939 if (lra_dump_file != NULL)
 5940 fprintf (lra_dump_file,
 5941 " [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
 /* Create the inheritance pseudo; restore_rtx records the insn
 pattern so the undo pass can reconstitute the invariant. */
 5942 new_reg = lra_create_new_reg (dst_mode, dst_reg,
 5943 cl, "invariant inheritance");
 5944 bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
 5945 bitmap_set_bit (&check_only_regs, REGNO (new_reg));
fee93b91 5946 lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
ab4ea053 5947 start_sequence ();
 5948 lra_emit_move (new_reg, dst_reg);
 5949 new_insns = get_insns ();
 5950 end_sequence ();
 5951 lra_process_new_insns (curr_insn, NULL, new_insns,
 5952 "Add invariant inheritance<-original");
 5953 start_sequence ();
 5954 lra_emit_move (SET_DEST (insn_set), new_reg);
 5955 new_insns = get_insns ();
 5956 end_sequence ();
 5957 lra_process_new_insns (insn, NULL, new_insns,
 5958 "Changing reload<-inheritance");
 5959 lra_set_insn_deleted (insn);
 5960 succ_p = true;
 5961 if (lra_dump_file != NULL)
 5962 {
 5963 fprintf (lra_dump_file,
 5964 " Invariant inheritance reuse change %d (bb%d):\n",
 5965 REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
 5966 dump_insn_slim (lra_dump_file, insn);
 5967 fprintf (lra_dump_file,
 5968 " ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
 5969 }
 5970 }
 5971 }
 /* Remember the current insn as the latest definition of this
 invariant for later candidates. */
 5972 invariant_ptr->insn = curr_insn;
 5973 return succ_p;
 5974}
5975
c6a6cdaa 5976/* The check-only registers (members of check_only_regs) living at the
 5977 current program point in the current EBB. */
 5978static bitmap_head live_regs;
5979
 5980/* Update live info in EBB given by its HEAD and TAIL insns after
 5981 inheritance/split transformation. The function removes dead moves
 5982 too. The EBB is scanned backwards, maintaining live_regs for the
 check_only_regs set. */
 5983static void
7f836b57 5984update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
c6a6cdaa 5985{
 5986 unsigned int j;
7eec3701 5987 int i, regno;
c6a6cdaa 5988 bool live_p;
7f836b57 5989 rtx_insn *prev_insn;
 5990 rtx set;
c6a6cdaa 5991 bool remove_p;
 5992 basic_block last_bb, prev_bb, curr_bb;
 5993 bitmap_iterator bi;
 5994 struct lra_insn_reg *reg;
 5995 edge e;
 5996 edge_iterator ei;
 5997
1a8f8886 5998 last_bb = BLOCK_FOR_INSN (tail);
c6a6cdaa 5999 prev_bb = NULL;
 6000 for (curr_insn = tail;
 6001 curr_insn != PREV_INSN (head);
 6002 curr_insn = prev_insn)
 6003 {
 6004 prev_insn = PREV_INSN (curr_insn);
76d77f1e 6005 /* We need to process empty blocks too. They contain
 6006 NOTE_INSN_BASIC_BLOCK referring for the basic block. */
 6007 if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
 6008 continue;
c6a6cdaa 6009 curr_bb = BLOCK_FOR_INSN (curr_insn);
 6010 if (curr_bb != prev_bb)
 6011 {
 /* Crossed a BB boundary: flush the accumulated liveness into
 the DF live-in/live-out sets of the adjacent blocks. */
 6012 if (prev_bb != NULL)
 6013 {
 6014 /* Update df_get_live_in (prev_bb): */
 6015 EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
 6016 if (bitmap_bit_p (&live_regs, j))
 6017 bitmap_set_bit (df_get_live_in (prev_bb), j);
 6018 else
 6019 bitmap_clear_bit (df_get_live_in (prev_bb), j);
 6020 }
 6021 if (curr_bb != last_bb)
 6022 {
 6023 /* Update df_get_live_out (curr_bb): */
 6024 EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
 6025 {
 6026 live_p = bitmap_bit_p (&live_regs, j);
 6027 if (! live_p)
 6028 FOR_EACH_EDGE (e, ei, curr_bb->succs)
 6029 if (bitmap_bit_p (df_get_live_in (e->dest), j))
 6030 {
 6031 live_p = true;
 6032 break;
 6033 }
 6034 if (live_p)
 6035 bitmap_set_bit (df_get_live_out (curr_bb), j);
 6036 else
 6037 bitmap_clear_bit (df_get_live_out (curr_bb), j);
 6038 }
 6039 }
 6040 prev_bb = curr_bb;
 6041 bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
 6042 }
0f31edc8 6043 if (! NONDEBUG_INSN_P (curr_insn))
c6a6cdaa 6044 continue;
 6045 curr_id = lra_get_insn_recog_data (curr_insn);
7eec3701 6046 curr_static_id = curr_id->insn_static_data;
c6a6cdaa 6047 remove_p = false;
 /* A candidate dead move: a single set of a check-only pseudo (not
 the PIC register) that is not live after this insn. */
5c819ea0 6048 if ((set = single_set (curr_insn)) != NULL_RTX
 6049 && REG_P (SET_DEST (set))
c6a6cdaa 6050 && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
5c819ea0 6051 && SET_DEST (set) != pic_offset_table_rtx
c6a6cdaa 6052 && bitmap_bit_p (&check_only_regs, regno)
 6053 && ! bitmap_bit_p (&live_regs, regno))
 6054 remove_p = true;
 6055 /* See which defined values die here. */
 6056 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
 6057 if (reg->type == OP_OUT && ! reg->subreg_p)
 6058 bitmap_clear_bit (&live_regs, reg->regno);
7eec3701 6059 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
 6060 if (reg->type == OP_OUT && ! reg->subreg_p)
 6061 bitmap_clear_bit (&live_regs, reg->regno);
853a01d6 6062 if (curr_id->arg_hard_regs != NULL)
 6063 /* Make clobbered argument hard registers die. */
 6064 for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
 6065 if (regno >= FIRST_PSEUDO_REGISTER)
 6066 bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
c6a6cdaa 6067 /* Mark each used value as live. */
 6068 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
53d78539 6069 if (reg->type != OP_OUT
c6a6cdaa 6070 && bitmap_bit_p (&check_only_regs, reg->regno))
 6071 bitmap_set_bit (&live_regs, reg->regno);
7eec3701 6072 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
 6073 if (reg->type != OP_OUT
 6074 && bitmap_bit_p (&check_only_regs, reg->regno))
 6075 bitmap_set_bit (&live_regs, reg->regno);
 6076 if (curr_id->arg_hard_regs != NULL)
853a01d6 6077 /* Make used argument hard registers live. */
7eec3701 6078 for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
853a01d6 6079 if (regno < FIRST_PSEUDO_REGISTER
 6080 && bitmap_bit_p (&check_only_regs, regno))
7eec3701 6081 bitmap_set_bit (&live_regs, regno);
c6a6cdaa 6082 /* It is quite important to remove dead move insns because it
 6083 means removing dead store. We don't need to process them for
 6084 constraints. */
 6085 if (remove_p)
 6086 {
 6087 if (lra_dump_file != NULL)
 6088 {
 6089 fprintf (lra_dump_file, " Removing dead insn:\n ");
6dde9719 6090 dump_insn_slim (lra_dump_file, curr_insn);
c6a6cdaa 6091 }
 6092 lra_set_insn_deleted (curr_insn);
 6093 }
 6094 }
 6095}
6096
 6097/* The structure describes info to do an inheritance for the current
 6098 insn. We need to collect such info first before doing the
 6099 transformations because the transformations change the insn
 6100 internal representation. */
 6101struct to_inherit
 6102{
 6103 /* Original regno. */
 6104 int regno;
 6105 /* Subsequent insns which can inherit original reg value. */
 6106 rtx insns;
 6107};
 6108
 6109/* Array containing all info for doing inheritance from the current
 6110 insn. */
 6111static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];
 6112
 6113/* Number of elements in the previous array. */
 6114static int to_inherit_num;
6115
 6116/* Add inheritance info REGNO and INSNS. Their meaning is described in
 6117 structure to_inherit. An entry with the same REGNO is added only
 once. */
 6118static void
 6119add_to_inherit (int regno, rtx insns)
 6120{
 6121 int i;
 6122
 6123 for (i = 0; i < to_inherit_num; i++)
 6124 if (to_inherit[i].regno == regno)
 6125 return;
 6126 lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
 6127 to_inherit[to_inherit_num].regno = regno;
 6128 to_inherit[to_inherit_num++].insns = insns;
 6129}
6130
 6131/* Return the last non-debug insn in basic block BB, or the block begin
 6132 note if none. */
7f836b57 6133static rtx_insn *
c6a6cdaa 6134get_last_insertion_point (basic_block bb)
 6135{
7f836b57 6136 rtx_insn *insn;
c6a6cdaa 6137
 6138 FOR_BB_INSNS_REVERSE (bb, insn)
 6139 if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
 6140 return insn;
 /* Presumably every BB contains at least its NOTE_INSN_BASIC_BLOCK,
 so falling through here indicates corrupted RTL. */
 6141 gcc_unreachable ();
 6142}
6143
 6144/* Set up RES by registers living on successor edges of FROM other
 6145 than the edge (FROM, TO), plus registers set up by a jump insn at
 the end of BB FROM. */
 6146static void
 6147get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
 6148{
7f836b57 6149 rtx_insn *last;
c6a6cdaa 6150 struct lra_insn_reg *reg;
 6151 edge e;
 6152 edge_iterator ei;
 6153
 6154 lra_assert (to != NULL);
 6155 bitmap_clear (res);
 6156 FOR_EACH_EDGE (e, ei, from->succs)
 6157 if (e->dest != to)
 6158 bitmap_ior_into (res, df_get_live_in (e->dest));
 6159 last = get_last_insertion_point (from);
 6160 if (! JUMP_P (last))
 6161 return;
 /* Registers written by the jump itself are live on exit too. */
 6162 curr_id = lra_get_insn_recog_data (last);
 6163 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
 6164 if (reg->type != OP_IN)
 6165 bitmap_set_bit (res, reg->regno);
 6166}
1a8f8886 6167
c6a6cdaa 6168/* Used as temporary results of some bitmap calculations. */
 6169static bitmap_head temp_bitmap;
 6170
7eec3701 6171/* We split for reloads of small class of hard regs. The following
 6172 defines how many hard regs the class should have to be qualified as
 6173 small. The code is mostly oriented to x86/x86-64 architecture
 6174 where some insns need to use only specific register or pair of
 6175 registers and these register can live in RTL explicitly, e.g. for
 6176 parameter passing. */
 6177static const int max_small_class_regs_num = 2;
6178
c6a6cdaa 6179/* Do inheritance/split transformations in EBB starting with HEAD and
6180 finishing on TAIL. We process EBB insns in the reverse order.
6181 Return true if we did any inheritance/split transformation in the
6182 EBB.
6183
6184 We should avoid excessive splitting which results in worse code
6185 because of inaccurate cost calculations for spilling new split
6186 pseudos in such case. To achieve this we do splitting only if
6187 register pressure is high in given basic block and there are reload
6188 pseudos requiring hard registers. We could do more register
6189 pressure calculations at any given program point to avoid necessary
6190 splitting even more but it is to expensive and the current approach
6191 works well enough. */
6192static bool
7f836b57 6193inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
c6a6cdaa 6194{
6195 int i, src_regno, dst_regno, nregs;
422470c1 6196 bool change_p, succ_p, update_reloads_num_p;
7f836b57 6197 rtx_insn *prev_insn, *last_insn;
ab4ea053 6198 rtx next_usage_insns, curr_set;
c6a6cdaa 6199 enum reg_class cl;
6200 struct lra_insn_reg *reg;
6201 basic_block last_processed_bb, curr_bb = NULL;
6202 HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
6203 bitmap to_process;
6204 unsigned int j;
6205 bitmap_iterator bi;
6206 bool head_p, after_p;
6207
6208 change_p = false;
6209 curr_usage_insns_check++;
ab4ea053 6210 clear_invariants ();
c6a6cdaa 6211 reloads_num = calls_num = 0;
6212 bitmap_clear (&check_only_regs);
ab4ea053 6213 bitmap_clear (&invalid_invariant_regs);
c6a6cdaa 6214 last_processed_bb = NULL;
6215 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
7eec3701 6216 COPY_HARD_REG_SET (live_hard_regs, eliminable_regset);
6217 IOR_HARD_REG_SET (live_hard_regs, lra_no_alloc_regs);
c6a6cdaa 6218 /* We don't process new insns generated in the loop. */
6219 for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
6220 {
6221 prev_insn = PREV_INSN (curr_insn);
6222 if (BLOCK_FOR_INSN (curr_insn) != NULL)
6223 curr_bb = BLOCK_FOR_INSN (curr_insn);
6224 if (last_processed_bb != curr_bb)
6225 {
6226 /* We are at the end of BB. Add qualified living
6227 pseudos for potential splitting. */
6228 to_process = df_get_live_out (curr_bb);
6229 if (last_processed_bb != NULL)
1a8f8886 6230 {
c6a6cdaa 6231 /* We are somewhere in the middle of EBB. */
6232 get_live_on_other_edges (curr_bb, last_processed_bb,
6233 &temp_bitmap);
6234 to_process = &temp_bitmap;
6235 }
6236 last_processed_bb = curr_bb;
6237 last_insn = get_last_insertion_point (curr_bb);
6238 after_p = (! JUMP_P (last_insn)
6239 && (! CALL_P (last_insn)
6240 || (find_reg_note (last_insn,
6241 REG_NORETURN, NULL_RTX) == NULL_RTX
6242 && ! SIBLING_CALL_P (last_insn))));
c6a6cdaa 6243 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
6244 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6245 {
6246 if ((int) j >= lra_constraint_new_regno_start)
6247 break;
6248 if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6249 {
6250 if (j < FIRST_PSEUDO_REGISTER)
6251 SET_HARD_REG_BIT (live_hard_regs, j);
6252 else
6253 add_to_hard_reg_set (&live_hard_regs,
6254 PSEUDO_REGNO_MODE (j),
6255 reg_renumber[j]);
6256 setup_next_usage_insn (j, last_insn, reloads_num, after_p);
6257 }
6258 }
6259 }
6260 src_regno = dst_regno = -1;
ab4ea053 6261 curr_set = single_set (curr_insn);
6262 if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
6263 dst_regno = REGNO (SET_DEST (curr_set));
6264 if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
6265 src_regno = REGNO (SET_SRC (curr_set));
422470c1 6266 update_reloads_num_p = true;
c6a6cdaa 6267 if (src_regno < lra_constraint_new_regno_start
6268 && src_regno >= FIRST_PSEUDO_REGISTER
6269 && reg_renumber[src_regno] < 0
6270 && dst_regno >= lra_constraint_new_regno_start
6271 && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
6272 {
6273 /* 'reload_pseudo <- original_pseudo'. */
7eec3701 6274 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6275 reloads_num++;
422470c1 6276 update_reloads_num_p = false;
c6a6cdaa 6277 succ_p = false;
6278 if (usage_insns[src_regno].check == curr_usage_insns_check
6279 && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
6280 succ_p = inherit_reload_reg (false, src_regno, cl,
6281 curr_insn, next_usage_insns);
6282 if (succ_p)
6283 change_p = true;
6284 else
6285 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6286 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6287 IOR_HARD_REG_SET (potential_reload_hard_regs,
6288 reg_class_contents[cl]);
6289 }
ab4ea053 6290 else if (src_regno < 0
6291 && dst_regno >= lra_constraint_new_regno_start
6292 && invariant_p (SET_SRC (curr_set))
6293 && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
6526e1b6 6294 && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
6295 && ! bitmap_bit_p (&invalid_invariant_regs,
6296 ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
ab4ea053 6297 {
6298 /* 'reload_pseudo <- invariant'. */
6299 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6300 reloads_num++;
6301 update_reloads_num_p = false;
6302 if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
6303 change_p = true;
6304 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6305 IOR_HARD_REG_SET (potential_reload_hard_regs,
6306 reg_class_contents[cl]);
6307 }
c6a6cdaa 6308 else if (src_regno >= lra_constraint_new_regno_start
6309 && dst_regno < lra_constraint_new_regno_start
6310 && dst_regno >= FIRST_PSEUDO_REGISTER
6311 && reg_renumber[dst_regno] < 0
6312 && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
6313 && usage_insns[dst_regno].check == curr_usage_insns_check
6314 && (next_usage_insns
6315 = usage_insns[dst_regno].insns) != NULL_RTX)
6316 {
7eec3701 6317 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6318 reloads_num++;
422470c1 6319 update_reloads_num_p = false;
c6a6cdaa 6320 /* 'original_pseudo <- reload_pseudo'. */
6321 if (! JUMP_P (curr_insn)
6322 && inherit_reload_reg (true, dst_regno, cl,
6323 curr_insn, next_usage_insns))
6324 change_p = true;
6325 /* Invalidate. */
6326 usage_insns[dst_regno].check = 0;
6327 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6328 IOR_HARD_REG_SET (potential_reload_hard_regs,
6329 reg_class_contents[cl]);
6330 }
6331 else if (INSN_P (curr_insn))
6332 {
bf63c98f 6333 int iter;
c6a6cdaa 6334 int max_uid = get_max_uid ();
6335
6336 curr_id = lra_get_insn_recog_data (curr_insn);
bf63c98f 6337 curr_static_id = curr_id->insn_static_data;
c6a6cdaa 6338 to_inherit_num = 0;
6339 /* Process insn definitions. */
bf63c98f 6340 for (iter = 0; iter < 2; iter++)
6341 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6342 reg != NULL;
6343 reg = reg->next)
6344 if (reg->type != OP_IN
6345 && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
6346 {
6347 if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
6348 && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
6349 && usage_insns[dst_regno].check == curr_usage_insns_check
6350 && (next_usage_insns
6351 = usage_insns[dst_regno].insns) != NULL_RTX)
6352 {
6353 struct lra_insn_reg *r;
6354
6355 for (r = curr_id->regs; r != NULL; r = r->next)
6356 if (r->type != OP_OUT && r->regno == dst_regno)
6357 break;
6358 /* Don't do inheritance if the pseudo is also
6359 used in the insn. */
6360 if (r == NULL)
f4d3c071 6361 /* We cannot do inheritance right now
bf63c98f 6362 because the current insn reg info (chain
6363 regs) can change after that. */
6364 add_to_inherit (dst_regno, next_usage_insns);
6365 }
f4d3c071 6366 /* We cannot process one reg twice here because of
bf63c98f 6367 usage_insns invalidation. */
6368 if ((dst_regno < FIRST_PSEUDO_REGISTER
6369 || reg_renumber[dst_regno] >= 0)
0157439c 6370 && ! reg->subreg_p && reg->type != OP_IN)
bf63c98f 6371 {
6372 HARD_REG_SET s;
6373
6374 if (split_if_necessary (dst_regno, reg->biggest_mode,
6375 potential_reload_hard_regs,
6376 false, curr_insn, max_uid))
6377 change_p = true;
6378 CLEAR_HARD_REG_SET (s);
6379 if (dst_regno < FIRST_PSEUDO_REGISTER)
6380 add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
6381 else
6382 add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
6383 reg_renumber[dst_regno]);
6384 AND_COMPL_HARD_REG_SET (live_hard_regs, s);
68d3038f 6385 AND_COMPL_HARD_REG_SET (potential_reload_hard_regs, s);
bf63c98f 6386 }
6387 /* We should invalidate potential inheritance or
6388 splitting for the current insn usages to the next
6389 usage insns (see code below) as the output pseudo
6390 prevents this. */
6391 if ((dst_regno >= FIRST_PSEUDO_REGISTER
6392 && reg_renumber[dst_regno] < 0)
6393 || (reg->type == OP_OUT && ! reg->subreg_p
6394 && (dst_regno < FIRST_PSEUDO_REGISTER
6395 || reg_renumber[dst_regno] >= 0)))
6396 {
6397 /* Invalidate and mark definitions. */
6398 if (dst_regno >= FIRST_PSEUDO_REGISTER)
6399 usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
6400 else
6401 {
92d2aec3 6402 nregs = hard_regno_nregs (dst_regno,
6403 reg->biggest_mode);
bf63c98f 6404 for (i = 0; i < nregs; i++)
6405 usage_insns[dst_regno + i].check
6406 = -(int) INSN_UID (curr_insn);
6407 }
6408 }
6409 }
853a01d6 6410 /* Process clobbered call regs. */
6411 if (curr_id->arg_hard_regs != NULL)
6412 for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6413 if (dst_regno >= FIRST_PSEUDO_REGISTER)
6414 usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
6415 = -(int) INSN_UID (curr_insn);
c6a6cdaa 6416 if (! JUMP_P (curr_insn))
6417 for (i = 0; i < to_inherit_num; i++)
6418 if (inherit_reload_reg (true, to_inherit[i].regno,
6419 ALL_REGS, curr_insn,
6420 to_inherit[i].insns))
6421 change_p = true;
6422 if (CALL_P (curr_insn))
6423 {
7f836b57 6424 rtx cheap, pat, dest;
6425 rtx_insn *restore;
c6a6cdaa 6426 int regno, hard_regno;
6427
6428 calls_num++;
6429 if ((cheap = find_reg_note (curr_insn,
6430 REG_RETURNED, NULL_RTX)) != NULL_RTX
6431 && ((cheap = XEXP (cheap, 0)), true)
6432 && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
6433 && (hard_regno = reg_renumber[regno]) >= 0
2fa8212b 6434 && usage_insns[regno].check == curr_usage_insns_check
c6a6cdaa 6435 /* If there are pending saves/restores, the
6436 optimization is not worth. */
6437 && usage_insns[regno].calls_num == calls_num - 1
6438 && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
6439 {
6440 /* Restore the pseudo from the call result as
6441 REG_RETURNED note says that the pseudo value is
6442 in the call result and the pseudo is an argument
6443 of the call. */
6444 pat = PATTERN (curr_insn);
6445 if (GET_CODE (pat) == PARALLEL)
6446 pat = XVECEXP (pat, 0, 0);
6447 dest = SET_DEST (pat);
e88cf7af 6448 /* For multiple return values dest is PARALLEL.
6449 Currently we handle only single return value case. */
6450 if (REG_P (dest))
6451 {
6452 start_sequence ();
6453 emit_move_insn (cheap, copy_rtx (dest));
6454 restore = get_insns ();
6455 end_sequence ();
6456 lra_process_new_insns (curr_insn, NULL, restore,
6457 "Inserting call parameter restore");
6458 /* We don't need to save/restore of the pseudo from
6459 this call. */
6460 usage_insns[regno].calls_num = calls_num;
6461 bitmap_set_bit (&check_only_regs, regno);
6462 }
c6a6cdaa 6463 }
6464 }
6465 to_inherit_num = 0;
6466 /* Process insn usages. */
bf63c98f 6467 for (iter = 0; iter < 2; iter++)
6468 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6469 reg != NULL;
6470 reg = reg->next)
6471 if ((reg->type != OP_OUT
6472 || (reg->type == OP_OUT && reg->subreg_p))
6473 && (src_regno = reg->regno) < lra_constraint_new_regno_start)
6474 {
6475 if (src_regno >= FIRST_PSEUDO_REGISTER
6476 && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
6477 {
6478 if (usage_insns[src_regno].check == curr_usage_insns_check
6479 && (next_usage_insns
6480 = usage_insns[src_regno].insns) != NULL_RTX
6481 && NONDEBUG_INSN_P (curr_insn))
6482 add_to_inherit (src_regno, next_usage_insns);
6483 else if (usage_insns[src_regno].check
6484 != -(int) INSN_UID (curr_insn))
6485 /* Add usages but only if the reg is not set up
6486 in the same insn. */
6487 add_next_usage_insn (src_regno, curr_insn, reloads_num);
6488 }
6489 else if (src_regno < FIRST_PSEUDO_REGISTER
6490 || reg_renumber[src_regno] >= 0)
6491 {
6492 bool before_p;
9ed997be 6493 rtx_insn *use_insn = curr_insn;
bf63c98f 6494
6495 before_p = (JUMP_P (curr_insn)
6496 || (CALL_P (curr_insn) && reg->type == OP_IN));
6497 if (NONDEBUG_INSN_P (curr_insn)
7eec3701 6498 && (! JUMP_P (curr_insn) || reg->type == OP_IN)
bf63c98f 6499 && split_if_necessary (src_regno, reg->biggest_mode,
6500 potential_reload_hard_regs,
6501 before_p, curr_insn, max_uid))
6502 {
6503 if (reg->subreg_p)
6504 lra_risky_transformations_p = true;
6505 change_p = true;
7eec3701 6506 /* Invalidate. */
bf63c98f 6507 usage_insns[src_regno].check = 0;
6508 if (before_p)
6509 use_insn = PREV_INSN (curr_insn);
6510 }
6511 if (NONDEBUG_INSN_P (curr_insn))
6512 {
6513 if (src_regno < FIRST_PSEUDO_REGISTER)
6514 add_to_hard_reg_set (&live_hard_regs,
6515 reg->biggest_mode, src_regno);
6516 else
6517 add_to_hard_reg_set (&live_hard_regs,
6518 PSEUDO_REGNO_MODE (src_regno),
6519 reg_renumber[src_regno]);
6520 }
fbaab486 6521 if (src_regno >= FIRST_PSEUDO_REGISTER)
6522 add_next_usage_insn (src_regno, use_insn, reloads_num);
6523 else
6524 {
6525 for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
6526 add_next_usage_insn (src_regno + i, use_insn, reloads_num);
6527 }
bf63c98f 6528 }
6529 }
853a01d6 6530 /* Process used call regs. */
422470c1 6531 if (curr_id->arg_hard_regs != NULL)
6532 for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6533 if (src_regno < FIRST_PSEUDO_REGISTER)
6534 {
6535 SET_HARD_REG_BIT (live_hard_regs, src_regno);
6536 add_next_usage_insn (src_regno, curr_insn, reloads_num);
6537 }
c6a6cdaa 6538 for (i = 0; i < to_inherit_num; i++)
6539 {
6540 src_regno = to_inherit[i].regno;
6541 if (inherit_reload_reg (false, src_regno, ALL_REGS,
6542 curr_insn, to_inherit[i].insns))
6543 change_p = true;
6544 else
6545 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6546 }
6547 }
422470c1 6548 if (update_reloads_num_p
ab4ea053 6549 && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
422470c1 6550 {
6551 int regno = -1;
ab4ea053 6552 if ((REG_P (SET_DEST (curr_set))
6553 && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
422470c1 6554 && reg_renumber[regno] < 0
6555 && (cl = lra_get_allocno_class (regno)) != NO_REGS)
ab4ea053 6556 || (REG_P (SET_SRC (curr_set))
6557 && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
422470c1 6558 && reg_renumber[regno] < 0
6559 && (cl = lra_get_allocno_class (regno)) != NO_REGS))
6560 {
7eec3701 6561 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6562 reloads_num++;
422470c1 6563 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6564 IOR_HARD_REG_SET (potential_reload_hard_regs,
6565 reg_class_contents[cl]);
6566 }
6567 }
ab4ea053 6568 if (NONDEBUG_INSN_P (curr_insn))
6569 {
6570 int regno;
6571
6572 /* Invalidate invariants with changed regs. */
6573 curr_id = lra_get_insn_recog_data (curr_insn);
6574 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6575 if (reg->type != OP_IN)
6526e1b6 6576 {
6577 bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6578 bitmap_set_bit (&invalid_invariant_regs,
6579 ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
6580 }
ab4ea053 6581 curr_static_id = curr_id->insn_static_data;
6582 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6583 if (reg->type != OP_IN)
6584 bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6585 if (curr_id->arg_hard_regs != NULL)
6586 for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6526e1b6 6587 if (regno >= FIRST_PSEUDO_REGISTER)
ab4ea053 6588 bitmap_set_bit (&invalid_invariant_regs,
6526e1b6 6589 regno - FIRST_PSEUDO_REGISTER);
ab4ea053 6590 }
c6a6cdaa 6591 /* We reached the start of the current basic block. */
6592 if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
6593 || BLOCK_FOR_INSN (prev_insn) != curr_bb)
6594 {
6595 /* We reached the beginning of the current block -- do
6596 rest of spliting in the current BB. */
6597 to_process = df_get_live_in (curr_bb);
6598 if (BLOCK_FOR_INSN (head) != curr_bb)
1a8f8886 6599 {
c6a6cdaa 6600 /* We are somewhere in the middle of EBB. */
6601 get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
6602 curr_bb, &temp_bitmap);
6603 to_process = &temp_bitmap;
6604 }
6605 head_p = true;
6606 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6607 {
6608 if ((int) j >= lra_constraint_new_regno_start)
6609 break;
6610 if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6611 && usage_insns[j].check == curr_usage_insns_check
6612 && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
6613 {
6614 if (need_for_split_p (potential_reload_hard_regs, j))
6615 {
6616 if (lra_dump_file != NULL && head_p)
6617 {
6618 fprintf (lra_dump_file,
6619 " ----------------------------------\n");
6620 head_p = false;
6621 }
6622 if (split_reg (false, j, bb_note (curr_bb),
6a4bc24e 6623 next_usage_insns, NULL))
c6a6cdaa 6624 change_p = true;
6625 }
6626 usage_insns[j].check = 0;
6627 }
6628 }
6629 }
6630 }
6631 return change_p;
6632}
6633
/* This value affects EBB forming.  If probability of edge from EBB to
   a BB is not greater than the following value, we don't add the BB
   to EBB.  Expressed in REG_BR_PROB_BASE units from the percentage
   param LRA_INHERITANCE_EBB_PROBABILITY_CUTOFF.  */
#define EBB_PROBABILITY_CUTOFF \
  ((REG_BR_PROB_BASE * LRA_INHERITANCE_EBB_PROBABILITY_CUTOFF) / 100)

/* Current number of inheritance/split iteration.  */
int lra_inheritance_iter;
6642
/* Entry function for inheritance/split pass.  Forms extended basic
   blocks (EBBs) of fall-through, sufficiently-probable edges and runs
   inherit_in_ebb on each.  Gives up after
   LRA_MAX_INHERITANCE_PASSES iterations.  */
void
lra_inheritance (void)
{
  int i;
  basic_block bb, start_bb;
  edge e;

  lra_inheritance_iter++;
  if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return;
  timevar_push (TV_LRA_INHERITANCE);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
	     lra_inheritance_iter);
  curr_usage_insns_check = 0;
  usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
  for (i = 0; i < lra_constraint_new_regno_start; i++)
    usage_insns[i].check = 0;
  bitmap_initialize (&check_only_regs, &reg_obstack);
  bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
  bitmap_initialize (&live_regs, &reg_obstack);
  bitmap_initialize (&temp_bitmap, &reg_obstack);
  bitmap_initialize (&ebb_global_regs, &reg_obstack);
  FOR_EACH_BB_FN (bb, cfun)
    {
      start_bb = bb;
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "EBB");
      /* Form a EBB starting with BB.  */
      bitmap_clear (&ebb_global_regs);
      bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
      for (;;)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, " %d", bb->index);
	  /* Stop extending the EBB at the exit block or at a block
	     that can be entered by a label (i.e. has other preds).  */
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || LABEL_P (BB_HEAD (bb->next_bb)))
	    break;
	  e = find_fallthru_edge (bb->succs);
	  if (! e)
	    break;
	  if (e->probability.initialized_p ()
	      && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
	    break;
	  bb = bb->next_bb;
	}
      bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "\n");
      if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
	/* Remember that the EBB head and tail can change in
	   inherit_in_ebb.  */
	update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
    }
  bitmap_release (&ebb_global_regs);
  bitmap_release (&temp_bitmap);
  bitmap_release (&live_regs);
  bitmap_release (&invalid_invariant_regs);
  bitmap_release (&check_only_regs);
  free (usage_insns);

  timevar_pop (TV_LRA_INHERITANCE);
}
6707
6708\f
6709
/* This page contains code to undo failed inheritance/split
   transformations.  */

/* Current number of iteration undoing inheritance/split.  */
int lra_undo_inheritance_iter;
6715
/* Fix BB live info LIVE after removing pseudos created on pass doing
   inheritance/split which are REMOVED_PSEUDOS.  If a removed pseudo
   was live, make the pseudo recorded in its restore_rtx live
   instead.  */
static void
fix_bb_live_info (bitmap live, bitmap removed_pseudos)
{
  unsigned int regno;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
    /* bitmap_clear_bit returns true only if the bit was set, so the
       replacement reg is added only when REGNO was actually live.  */
    if (bitmap_clear_bit (live, regno)
	&& REG_P (lra_reg_info[regno].restore_rtx))
      bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
}
6729
6730/* Return regno of the (subreg of) REG. Otherwise, return a negative
6731 number. */
6732static int
6733get_regno (rtx reg)
6734{
6735 if (GET_CODE (reg) == SUBREG)
6736 reg = SUBREG_REG (reg);
6737 if (REG_P (reg))
6738 return REGNO (reg);
6739 return -1;
6740}
6741
02ffd664 6742/* Delete a move INSN with destination reg DREGNO and a previous
6743 clobber insn with the same regno. The inheritance/split code can
6744 generate moves with preceding clobber and when we delete such moves
6745 we should delete the clobber insn too to keep the correct life
6746 info. */
6747static void
6748delete_move_and_clobber (rtx_insn *insn, int dregno)
6749{
6750 rtx_insn *prev_insn = PREV_INSN (insn);
6751
6752 lra_set_insn_deleted (insn);
2b3c633f 6753 lra_assert (dregno >= 0);
02ffd664 6754 if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
6755 && GET_CODE (PATTERN (prev_insn)) == CLOBBER
6756 && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
6757 lra_set_insn_deleted (prev_insn);
6758}
6759
/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
   return true if we did any change.  The undo transformations for
   inheritance looks like
     i <- i2
     p <- i	  =>   p <- i2
   or removing
     p <- i, i <- p, and i <- i3
   where p is original pseudo from which inheritance pseudo i was
   created, i and i3 are removed inheritance pseudos, i2 is another
   not removed inheritance pseudo.  All split pseudos or other
   occurrences of removed inheritance pseudos are changed on the
   corresponding original pseudos.

   The function also schedules insns changed and created during
   inheritance/split pass for processing by the subsequent constraint
   pass.  */
static bool
remove_inheritance_pseudos (bitmap remove_pseudos)
{
  basic_block bb;
  int regno, sregno, prev_sregno, dregno;
  rtx restore_rtx;
  rtx set, prev_set;
  rtx_insn *prev_insn;
  bool change_p, done_p;

  change_p = ! bitmap_empty_p (remove_pseudos);
  /* We cannot finish the function right away if CHANGE_P is true
     because we need to marks insns affected by previous
     inheritance/split pass for processing by the subsequent
     constraint pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
      fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
      FOR_BB_INSNS_REVERSE (bb, curr_insn)
	{
	  if (! INSN_P (curr_insn))
	    continue;
	  done_p = false;
	  sregno = dregno = -1;
	  if (change_p && NONDEBUG_INSN_P (curr_insn)
	      && (set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dregno = get_regno (SET_DEST (set));
	      sregno = get_regno (SET_SRC (set));
	    }

	  if (sregno >= 0 && dregno >= 0)
	    {
	      /* A non-REG restore_rtx marks an invariant inheritance
		 pseudo (its "restore" value is an expression).  */
	      if (bitmap_bit_p (remove_pseudos, dregno)
		  && ! REG_P (lra_reg_info[dregno].restore_rtx))
		{
		  /* invariant inheritance pseudo <- original pseudo */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing invariant inheritance:\n");
		      dump_insn_slim (lra_dump_file, curr_insn);
		      fprintf (lra_dump_file, "\n");
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && ! REG_P (lra_reg_info[sregno].restore_rtx))
		{
		  /* reload pseudo <- invariant inheritance pseudo */
		  start_sequence ();
		  /* We cannot just change the source.  It might be
		     an insn different from the move.  */
		  emit_insn (lra_reg_info[sregno].restore_rtx);
		  rtx_insn *new_insns = get_insns ();
		  end_sequence ();
		  lra_assert (single_set (new_insns) != NULL
			      && SET_DEST (set) == SET_DEST (single_set (new_insns)));
		  lra_process_new_insns (curr_insn, NULL, new_insns,
					 "Changing reload<-invariant inheritance");
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if ((bitmap_bit_p (remove_pseudos, sregno)
			&& (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
			    || (bitmap_bit_p (remove_pseudos, dregno)
				&& get_regno (lra_reg_info[sregno].restore_rtx) >= 0
				&& (get_regno (lra_reg_info[sregno].restore_rtx)
				    == get_regno (lra_reg_info[dregno].restore_rtx)))))
		       || (bitmap_bit_p (remove_pseudos, dregno)
			   && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
		/* One of the following cases:
		     original <- removed inheritance pseudo
		     removed inherit pseudo <- another removed inherit pseudo
		     removed inherit pseudo <- original pseudo
		   Or
		     removed_split_pseudo <- original_reg
		     original_reg <- removed_split_pseudo */
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "	   Removing %s:\n",
			       bitmap_bit_p (&lra_split_regs, sregno)
			       || bitmap_bit_p (&lra_split_regs, dregno)
			       ? "split" : "inheritance");
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  delete_move_and_clobber (curr_insn, dregno);
		  done_p = true;
		}
	      else if (bitmap_bit_p (remove_pseudos, sregno)
		       && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
		{
		  /* Search the following pattern:
		       inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
		       original_pseudo <- inherit_or_split_pseudo1
		    where the 2nd insn is the current insn and
		    inherit_or_split_pseudo2 is not removed.  If it is found,
		    change the current insn onto:
		       original_pseudo <- inherit_or_split_pseudo2.  */
		  for (prev_insn = PREV_INSN (curr_insn);
		       prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
		       prev_insn = PREV_INSN (prev_insn))
		    ;
		  if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
		      && (prev_set = single_set (prev_insn)) != NULL_RTX
		      /* There should be no subregs in insn we are
			 searching because only the original reg might
			 be in subreg when we changed the mode of
			 load/store for splitting.  */
		      && REG_P (SET_DEST (prev_set))
		      && REG_P (SET_SRC (prev_set))
		      && (int) REGNO (SET_DEST (prev_set)) == sregno
		      && ((prev_sregno = REGNO (SET_SRC (prev_set)))
			  >= FIRST_PSEUDO_REGISTER)
		      && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
			  ||
			  /* As we consider chain of inheritance or
			     splitting described in above comment we should
			     check that sregno and prev_sregno were
			     inheritance/split pseudos created from the
			     same original regno.  */
			  (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
			   && (get_regno (lra_reg_info[sregno].restore_rtx)
			       == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
		      && ! bitmap_bit_p (remove_pseudos, prev_sregno))
		    {
		      lra_assert (GET_MODE (SET_SRC (prev_set))
				  == GET_MODE (regno_reg_rtx[sregno]));
		      /* Although we have a single set, the insn can
			 contain more one sregno register occurrence
			 as a source.  Change all occurrences.  */
		      lra_substitute_pseudo_within_insn (curr_insn, sregno,
							 SET_SRC (prev_set),
							 false);
		      /* As we are finishing with processing the insn
			 here, check the destination too as it might
			 inheritance pseudo for another pseudo.  */
		      if (bitmap_bit_p (remove_pseudos, dregno)
			  && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
			  && (restore_rtx
			      = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
			{
			  if (GET_CODE (SET_DEST (set)) == SUBREG)
			    SUBREG_REG (SET_DEST (set)) = restore_rtx;
			  else
			    SET_DEST (set) = restore_rtx;
			}
		      lra_push_insn_and_update_insn_regno_info (curr_insn);
		      lra_set_used_insn_alternative_by_uid
			(INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		      done_p = true;
		      if (lra_dump_file != NULL)
			{
			  fprintf (lra_dump_file, "    Change reload insn:\n");
			  dump_insn_slim (lra_dump_file, curr_insn);
			}
		    }
		}
	    }
	  if (! done_p)
	    {
	      struct lra_insn_reg *reg;
	      bool restored_regs_p = false;
	      bool kept_regs_p = false;

	      curr_id = lra_get_insn_recog_data (curr_insn);
	      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
		{
		  regno = reg->regno;
		  restore_rtx = lra_reg_info[regno].restore_rtx;
		  if (restore_rtx != NULL_RTX)
		    {
		      if (change_p && bitmap_bit_p (remove_pseudos, regno))
			{
			  lra_substitute_pseudo_within_insn
			    (curr_insn, regno, restore_rtx, false);
			  restored_regs_p = true;
			}
		      else
			kept_regs_p = true;
		    }
		}
	      if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
		{
		  /* The instruction has changed since the previous
		     constraints pass.  */
		  lra_push_insn_and_update_insn_regno_info (curr_insn);
		  lra_set_used_insn_alternative_by_uid
		    (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
		}
	      else if (restored_regs_p)
		/* The instruction has been restored to the form that
		   it had during the previous constraints pass.  */
		lra_update_insn_regno_info (curr_insn);
	      if (restored_regs_p && lra_dump_file != NULL)
		{
		  fprintf (lra_dump_file, "   Insn after restoring regs:\n");
		  dump_insn_slim (lra_dump_file, curr_insn);
		}
	    }
	}
    }
  return change_p;
}
6982
/* If optional reload pseudos failed to get a hard register or was not
   inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to figure out necessity to
   remove optional reloads easier.  Return true if we do any
   change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);

  /* Phase 1: decide which optional reload pseudos to keep; the rest
     stay in REMOVED_OPTIONAL_RELOAD_PSEUDOS.  */
  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_rtx == NULL_RTX
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if (! REG_P (src) || ! REG_P (dest))
	      continue;
	    if (REGNO (dest) == regno
		/* Ignore insn for optional reloads itself.  */
		&& REGNO (lra_reg_info[regno].restore_rtx) != REGNO (src)
		/* Check only inheritance on last inheritance pass.  */
		&& (int) REGNO (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
  /* Phase 2: for each removed optional reload, delete its moves and
     substitute the original pseudo back into the remaining insns.  */
  auto_bitmap insn_bitmap (&reg_obstack);
  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
	{
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      if (REG_P (src) && REG_P (dest)
		  && ((REGNO (src) == regno
		       && (REGNO (lra_reg_info[regno].restore_rtx)
			   == REGNO (dest)))
		      || (REGNO (dest) == regno
			  && (REGNO (lra_reg_info[regno].restore_rtx)
			      == REGNO (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, REGNO (dest));
		  continue;
		}
	      /* We should not worry about generation memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  lra_substitute_pseudo_within_insn
	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  return change_p;
}
7091
/* Entry function for undoing inheritance/split transformation.  Return true
   if we did any RTL change in this pass.

   Works in three stages:
     1. Scan LRA_INHERITANCE_PSEUDOS and LRA_SPLIT_REGS and collect into a
	local bitmap the pseudos whose transformation should be undone
	(those that did not profit from inheritance/splitting).
     2. Call remove_inheritance_pseudos to rewrite the RTL for them.
     3. Clear all restore_rtx fields and additionally undo unused optional
	reloads (undo_optional_reloads), which may itself change the RTL.

   The whole pass is bounded: after LRA_MAX_INHERITANCE_PASSES invocations
   it becomes a no-op to guarantee termination of the LRA loop.  */
bool
lra_undo_inheritance (void)
{
  unsigned int regno;
  int hard_regno;
  /* Statistics counters: total vs. kept inheritance/split pseudos,
     used only for the dump-file percentages below.  */
  int n_all_inherit, n_inherit, n_all_split, n_split;
  rtx restore_rtx;
  bitmap_iterator bi;
  bool change_p;

  /* Enforce an upper bound on the number of undo passes so LRA is
     guaranteed to converge.  */
  lra_undo_inheritance_iter++;
  if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
    return false;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "\n********** Undoing inheritance #%d: **********\n\n",
	     lra_undo_inheritance_iter);
  /* Pseudos whose inheritance/split should be rolled back.  */
  auto_bitmap remove_pseudos (&reg_obstack);
  n_inherit = n_all_inherit = 0;
  /* Stage 1a: decide the fate of each inheritance pseudo.  A non-null
     restore_rtx marks a pseudo created by the inheritance pass.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    if (lra_reg_info[regno].restore_rtx != NULL_RTX)
      {
	n_all_inherit++;
	/* Undo only if the inheritance pseudo did not get a hard
	   register (reg_renumber < 0), i.e. inheritance did not pay off.  */
	if (reg_renumber[regno] < 0
	    /* If the original pseudo changed its allocation, just
	       removing inheritance is dangerous as for changing
	       allocation we used shorter live-ranges.  */
	    && (! REG_P (lra_reg_info[regno].restore_rtx)
		|| reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  n_inherit++;
      }
  if (lra_dump_file != NULL && n_all_inherit != 0)
    fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
	     n_inherit, n_all_inherit,
	     (double) n_inherit / n_all_inherit * 100);
  n_split = n_all_split = 0;
  /* Stage 1b: decide the fate of each split pseudo.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
      {
	int restore_regno = REGNO (restore_rtx);

	n_all_split++;
	/* The restore reg may already be a hard register; otherwise map
	   the original pseudo to its assigned hard register (or -1).  */
	hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
		      ? reg_renumber[restore_regno] : restore_regno);
	/* Undo the split when the original got no hard register, or when
	   the split pseudo ended up in the same hard register anyway --
	   in both cases the split gained nothing.  */
	if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
	  bitmap_set_bit (remove_pseudos, regno);
	else
	  {
	    n_split++;
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "	     Keep split r%d (orig=r%d)\n",
		       regno, restore_regno);
	  }
      }
  if (lra_dump_file != NULL && n_all_split != 0)
    fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
	     n_split, n_all_split,
	     (double) n_split / n_all_split * 100);
  /* Stage 2: rewrite the RTL for all pseudos collected above.  */
  change_p = remove_inheritance_pseudos (remove_pseudos);
  /* Stage 3: clear restore_regnos so the next pass starts clean.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  /* Evaluate undo_optional_reloads unconditionally (it is on the left of
     '||') -- it must run even when CHANGE_P is already true.  */
  change_p = undo_optional_reloads () || change_p;
  return change_p;
}