]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/lra-constraints.c
Remove AND_HARD_REG_SET
[thirdparty/gcc.git] / gcc / lra-constraints.c
CommitLineData
55a2c322 1/* Code for RTL transformations to satisfy insn constraints.
a5544970 2 Copyright (C) 2010-2019 Free Software Foundation, Inc.
55a2c322
VM
3 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22/* This file contains code for 3 passes: constraint pass,
23 inheritance/split pass, and pass for undoing failed inheritance and
24 split.
25
26 The major goal of constraint pass is to transform RTL to satisfy
27 insn and address constraints by:
28 o choosing insn alternatives;
29 o generating *reload insns* (or reloads in brief) and *reload
30 pseudos* which will get necessary hard registers later;
31 o substituting pseudos with equivalent values and removing the
32 instructions that initialized those pseudos.
33
34 The constraint pass has biggest and most complicated code in LRA.
35 There are a lot of important details like:
36 o reuse of input reload pseudos to simplify reload pseudo
37 allocations;
38 o some heuristics to choose insn alternative to improve the
39 inheritance;
40 o early clobbers etc.
41
42 The pass is mimicking former reload pass in alternative choosing
43 because the reload pass is oriented to current machine description
44 model. It might be changed if the machine description model is
45 changed.
46
47 There is special code for preventing all LRA and this pass cycling
48 in case of bugs.
49
50 On the first iteration of the pass we process every instruction and
51 choose an alternative for each one. On subsequent iterations we try
52 to avoid reprocessing instructions if we can be sure that the old
53 choice is still valid.
54
 55    The inheritance/split pass is to transform code to achieve
 56    inheritance and live range splitting.  It is done on backward
57 traversal of EBBs.
58
59 The inheritance optimization goal is to reuse values in hard
60 registers. There is analogous optimization in old reload pass. The
61 inheritance is achieved by following transformation:
62
63 reload_p1 <- p reload_p1 <- p
64 ... new_p <- reload_p1
65 ... => ...
66 reload_p2 <- p reload_p2 <- new_p
67
68 where p is spilled and not changed between the insns. Reload_p1 is
69 also called *original pseudo* and new_p is called *inheritance
70 pseudo*.
71
72 The subsequent assignment pass will try to assign the same (or
73 another if it is not possible) hard register to new_p as to
74 reload_p1 or reload_p2.
75
76 If the assignment pass fails to assign a hard register to new_p,
77 this file will undo the inheritance and restore the original code.
78 This is because implementing the above sequence with a spilled
79 new_p would make the code much worse. The inheritance is done in
80 EBB scope. The above is just a simplified example to get an idea
81 of the inheritance as the inheritance is also done for non-reload
82 insns.
83
84 Splitting (transformation) is also done in EBB scope on the same
85 pass as the inheritance:
86
87 r <- ... or ... <- r r <- ... or ... <- r
88 ... s <- r (new insn -- save)
f4eafc30 89 ... =>
55a2c322
VM
90 ... r <- s (new insn -- restore)
91 ... <- r ... <- r
92
93 The *split pseudo* s is assigned to the hard register of the
94 original pseudo or hard register r.
95
96 Splitting is done:
97 o In EBBs with high register pressure for global pseudos (living
98 in at least 2 BBs) and assigned to hard registers when there
 99	 are more than one reload needing the hard registers;
100 o for pseudos needing save/restore code around calls.
101
102 If the split pseudo still has the same hard register as the
103 original pseudo after the subsequent assignment pass or the
104 original pseudo was split, the opposite transformation is done on
105 the same pass for undoing inheritance. */
106
107#undef REG_OK_STRICT
108
109#include "config.h"
110#include "system.h"
111#include "coretypes.h"
c7131fb2 112#include "backend.h"
957060b5 113#include "target.h"
55a2c322 114#include "rtl.h"
957060b5
AM
115#include "tree.h"
116#include "predict.h"
c7131fb2 117#include "df.h"
4d0cdd0c 118#include "memmodel.h"
55a2c322 119#include "tm_p.h"
957060b5
AM
120#include "expmed.h"
121#include "optabs.h"
55a2c322 122#include "regs.h"
957060b5 123#include "ira.h"
55a2c322
VM
124#include "recog.h"
125#include "output.h"
126#include "addresses.h"
55a2c322 127#include "expr.h"
60393bbc 128#include "cfgrtl.h"
55a2c322 129#include "rtl-error.h"
fb8a0e40 130#include "params.h"
c7131fb2 131#include "lra.h"
55a2c322 132#include "lra-int.h"
013a8899 133#include "print-rtl.h"
55a2c322
VM
134
135/* Value of LRA_CURR_RELOAD_NUM at the beginning of BB of the current
136 insn. Remember that LRA_CURR_RELOAD_NUM is the number of emitted
137 reload insns. */
138static int bb_reload_num;
139
2c62cbaa
VM
140/* The current insn being processed and corresponding its single set
141 (NULL otherwise), its data (basic block, the insn data, the insn
142 static data, and the mode of each operand). */
cfa434f6 143static rtx_insn *curr_insn;
2c62cbaa 144static rtx curr_insn_set;
55a2c322
VM
145static basic_block curr_bb;
146static lra_insn_recog_data_t curr_id;
147static struct lra_static_insn_data *curr_static_id;
ef4bddc2 148static machine_mode curr_operand_mode[MAX_RECOG_OPERANDS];
895ff86f
VM
149/* Mode of the register substituted by its equivalence with VOIDmode
150 (e.g. constant) and whose subreg is given operand of the current
151 insn. VOIDmode in all other cases. */
152static machine_mode original_subreg_reg_mode[MAX_RECOG_OPERANDS];
55a2c322
VM
153
154\f
155
156/* Start numbers for new registers and insns at the current constraints
157 pass start. */
158static int new_regno_start;
159static int new_insn_uid_start;
160
277f65de
RS
161/* If LOC is nonnull, strip any outer subreg from it. */
162static inline rtx *
163strip_subreg (rtx *loc)
164{
165 return loc && GET_CODE (*loc) == SUBREG ? &SUBREG_REG (*loc) : loc;
166}
167
55a2c322
VM
168/* Return hard regno of REGNO or if it is was not assigned to a hard
169 register, use a hard register from its allocno class. */
170static int
171get_try_hard_regno (int regno)
172{
173 int hard_regno;
174 enum reg_class rclass;
175
176 if ((hard_regno = regno) >= FIRST_PSEUDO_REGISTER)
177 hard_regno = lra_get_regno_hard_regno (regno);
178 if (hard_regno >= 0)
179 return hard_regno;
180 rclass = lra_get_allocno_class (regno);
181 if (rclass == NO_REGS)
182 return -1;
183 return ira_class_hard_regs[rclass][0];
184}
185
9d0a9bb4
PB
186/* Return the hard regno of X after removing its subreg. If X is not
187 a register or a subreg of a register, return -1. If X is a pseudo,
1686923c
BE
188 use its assignment. If FINAL_P return the final hard regno which will
189 be after elimination. */
55a2c322 190static int
1686923c 191get_hard_regno (rtx x, bool final_p)
55a2c322
VM
192{
193 rtx reg;
1686923c 194 int hard_regno;
55a2c322
VM
195
196 reg = x;
9d0a9bb4 197 if (SUBREG_P (x))
55a2c322
VM
198 reg = SUBREG_REG (x);
199 if (! REG_P (reg))
200 return -1;
9d0a9bb4 201 if (! HARD_REGISTER_NUM_P (hard_regno = REGNO (reg)))
55a2c322
VM
202 hard_regno = lra_get_regno_hard_regno (hard_regno);
203 if (hard_regno < 0)
204 return -1;
1686923c
BE
205 if (final_p)
206 hard_regno = lra_get_elimination_hard_regno (hard_regno);
9d0a9bb4 207 if (SUBREG_P (x))
1686923c
BE
208 hard_regno += subreg_regno_offset (hard_regno, GET_MODE (reg),
209 SUBREG_BYTE (x), GET_MODE (x));
210 return hard_regno;
55a2c322
VM
211}
212
213/* If REGNO is a hard register or has been allocated a hard register,
214 return the class of that register. If REGNO is a reload pseudo
215 created by the current constraints pass, return its allocno class.
216 Return NO_REGS otherwise. */
217static enum reg_class
218get_reg_class (int regno)
219{
220 int hard_regno;
221
1686923c 222 if (! HARD_REGISTER_NUM_P (hard_regno = regno))
55a2c322
VM
223 hard_regno = lra_get_regno_hard_regno (regno);
224 if (hard_regno >= 0)
225 {
1686923c 226 hard_regno = lra_get_elimination_hard_regno (hard_regno);
55a2c322
VM
227 return REGNO_REG_CLASS (hard_regno);
228 }
229 if (regno >= new_regno_start)
230 return lra_get_allocno_class (regno);
231 return NO_REGS;
232}
233
/* Return true if REG satisfies (or will satisfy) reg class constraint
   CL.  Use elimination first if REG is a hard register.  If REG is a
   reload pseudo created by this constraints pass, assume that it will
   be allocated a hard register from its allocno class, but allow that
   class to be narrowed to CL if it is currently a superset of CL.

   If NEW_CLASS is nonnull, set *NEW_CLASS to the new allocno class of
   REGNO (reg), or NO_REGS if no change in its class was needed.  */
static bool
in_class_p (rtx reg, enum reg_class cl, enum reg_class *new_class)
{
  enum reg_class rclass, common_class;
  machine_mode reg_mode;
  int class_size, hard_regno, nregs, i, j;
  int regno = REGNO (reg);

  if (new_class != NULL)
    *new_class = NO_REGS;
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: test class membership after elimination.  */
      rtx final_reg = reg;
      rtx *final_loc = &final_reg;

      lra_eliminate_reg_if_possible (final_loc);
      return TEST_HARD_REG_BIT (reg_class_contents[cl], REGNO (*final_loc));
    }
  reg_mode = GET_MODE (reg);
  rclass = get_reg_class (regno);
  if (regno < new_regno_start
      /* Do not allow the constraints for reload instructions to
	 influence the classes of new pseudos.  These reloads are
	 typically moves that have many alternatives, and restricting
	 reload pseudos for one alternative may lead to situations
	 where other reload pseudos are no longer allocatable.  */
      || (INSN_UID (curr_insn) >= new_insn_uid_start
	  && curr_insn_set != NULL
	  && ((OBJECT_P (SET_SRC (curr_insn_set))
	       && ! CONSTANT_P (SET_SRC (curr_insn_set)))
	      || (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
		  && OBJECT_P (SUBREG_REG (SET_SRC (curr_insn_set)))
		  && ! CONSTANT_P (SUBREG_REG (SET_SRC (curr_insn_set)))))))
    /* When we don't know what class will be used finally for reload
       pseudos, we use ALL_REGS.  */
    return ((regno >= new_regno_start && rclass == ALL_REGS)
	    || (rclass != NO_REGS && ira_class_subset_p[rclass][cl]
		&& ! hard_reg_set_subset_p (reg_class_contents[cl],
					    lra_no_alloc_regs)));
  else
    {
      /* Reload pseudo of this pass: narrowing its class to the
	 intersection with CL is permitted.  */
      common_class = ira_reg_class_subset[rclass][cl];
      if (new_class != NULL)
	*new_class = common_class;
      /* The narrowed class is useless if all its registers are
	 unallocatable.  */
      if (hard_reg_set_subset_p (reg_class_contents[common_class],
				 lra_no_alloc_regs))
	return false;
      /* Check that there are enough allocatable regs: at least one
	 register of the class must have all of its REG_MODE-sized
	 footprint allocatable and inside the class.  */
      class_size = ira_class_hard_regs_num[common_class];
      for (i = 0; i < class_size; i++)
	{
	  hard_regno = ira_class_hard_regs[common_class][i];
	  nregs = hard_regno_nregs (hard_regno, reg_mode);
	  if (nregs == 1)
	    return true;
	  for (j = 0; j < nregs; j++)
	    if (TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno + j)
		|| ! TEST_HARD_REG_BIT (reg_class_contents[common_class],
					hard_regno + j))
	      break;
	  if (j >= nregs)
	    return true;
	}
      return false;
    }
}
308
/* Return true if REGNO satisfies a memory constraint, i.e. the pseudo
   neither has a hard register nor (as a reload pseudo) an allocno
   class, so it will live in memory.  */
static bool
in_mem_p (int regno)
{
  return get_reg_class (regno) == NO_REGS;
}
315
a953491e
RS
/* Return 1 if ADDR is a valid memory address for mode MODE in address
   space AS, and check that each pseudo has the proper kind of hard
   reg.  */
static int
valid_address_p (machine_mode mode ATTRIBUTE_UNUSED,
		 rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Old-style port: GO_IF_LEGITIMATE_ADDRESS is a statement macro
     that jumps to WIN when the address is legitimate.  Such ports do
     not support non-generic address spaces.  */
  lra_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Hook-based port; the 0 argument requests a non-strict check
     (REG_OK_STRICT is #undef'ed at the top of this file).  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
334
namespace {
  /* Temporarily eliminates registers in an address (for the lifetime of
     the object).  The constructor rewrites the base and index terms of
     the address through register elimination; the destructor restores
     the original registers.  */
  class address_eliminator {
  public:
    address_eliminator (struct address_info *ad);
    ~address_eliminator ();

  private:
    /* The decomposed address being modified in place.  */
    struct address_info *m_ad;
    /* Location of the base register term (outer subreg stripped),
       or NULL if the address has no base term.  */
    rtx *m_base_loc;
    /* Original value of *m_base_loc, saved for restoration.  */
    rtx m_base_reg;
    /* Likewise for the index term.  */
    rtx *m_index_loc;
    rtx m_index_reg;
  };
}
351
/* Replace the base and index registers of AD by their eliminations,
   remembering the original registers so the destructor can restore
   them.  */
address_eliminator::address_eliminator (struct address_info *ad)
  : m_ad (ad),
    m_base_loc (strip_subreg (ad->base_term)),
    m_base_reg (NULL_RTX),
    m_index_loc (strip_subreg (ad->index_term)),
    m_index_reg (NULL_RTX)
{
  if (m_base_loc != NULL)
    {
      m_base_reg = *m_base_loc;
      /* If we have non-legitimate address which is decomposed not in
	 the way we expected, don't do elimination here.  In such case
	 the address will be reloaded and elimination will be done in
	 reload insn finally.  */
      if (REG_P (m_base_reg))
	lra_eliminate_reg_if_possible (m_base_loc);
      /* Keep the second copy of the base term in sync — presumably
	 for addresses that mention the base twice (TODO confirm
	 against decompose_address).  */
      if (m_ad->base_term2 != NULL)
	*m_ad->base_term2 = *m_ad->base_term;
    }
  if (m_index_loc != NULL)
    {
      m_index_reg = *m_index_loc;
      if (REG_P (m_index_reg))
	lra_eliminate_reg_if_possible (m_index_loc);
    }
}
378
379address_eliminator::~address_eliminator ()
380{
381 if (m_base_loc && *m_base_loc != m_base_reg)
382 {
383 *m_base_loc = m_base_reg;
384 if (m_ad->base_term2 != NULL)
385 *m_ad->base_term2 = *m_ad->base_term;
386 }
387 if (m_index_loc && *m_index_loc != m_index_reg)
388 *m_index_loc = m_index_reg;
389}
390
391/* Return true if the eliminated form of AD is a legitimate target address. */
392static bool
393valid_address_p (struct address_info *ad)
394{
395 address_eliminator eliminator (ad);
396 return valid_address_p (ad->mode, *ad->outer, ad->as);
397}
398
a953491e 399/* Return true if the eliminated form of memory reference OP satisfies
9eb1ca69 400 extra (special) memory constraint CONSTRAINT. */
a953491e 401static bool
777e635f 402satisfies_memory_constraint_p (rtx op, enum constraint_num constraint)
a953491e
RS
403{
404 struct address_info ad;
405
406 decompose_mem_address (&ad, op);
407 address_eliminator eliminator (&ad);
777e635f 408 return constraint_satisfied_p (op, constraint);
a953491e
RS
409}
410
411/* Return true if the eliminated form of address AD satisfies extra
412 address constraint CONSTRAINT. */
413static bool
414satisfies_address_constraint_p (struct address_info *ad,
777e635f 415 enum constraint_num constraint)
a953491e
RS
416{
417 address_eliminator eliminator (ad);
777e635f 418 return constraint_satisfied_p (*ad->outer, constraint);
a953491e
RS
419}
420
421/* Return true if the eliminated form of address OP satisfies extra
422 address constraint CONSTRAINT. */
423static bool
777e635f 424satisfies_address_constraint_p (rtx op, enum constraint_num constraint)
a953491e
RS
425{
426 struct address_info ad;
427
428 decompose_lea_address (&ad, &op);
429 return satisfies_address_constraint_p (&ad, constraint);
430}
a953491e 431
4c2b2d79
VM
432/* Initiate equivalences for LRA. As we keep original equivalences
433 before any elimination, we need to make copies otherwise any change
434 in insns might change the equivalences. */
435void
436lra_init_equiv (void)
437{
438 ira_expand_reg_equiv ();
439 for (int i = FIRST_PSEUDO_REGISTER; i < max_reg_num (); i++)
440 {
441 rtx res;
442
443 if ((res = ira_reg_equiv[i].memory) != NULL_RTX)
444 ira_reg_equiv[i].memory = copy_rtx (res);
445 if ((res = ira_reg_equiv[i].invariant) != NULL_RTX)
446 ira_reg_equiv[i].invariant = copy_rtx (res);
447 }
448}
449
450static rtx loc_equivalence_callback (rtx, const_rtx, void *);
451
452/* Update equivalence for REGNO. We need to this as the equivalence
453 might contain other pseudos which are changed by their
454 equivalences. */
455static void
456update_equiv (int regno)
457{
458 rtx x;
459
460 if ((x = ira_reg_equiv[regno].memory) != NULL_RTX)
461 ira_reg_equiv[regno].memory
462 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
463 NULL_RTX);
464 if ((x = ira_reg_equiv[regno].invariant) != NULL_RTX)
465 ira_reg_equiv[regno].invariant
466 = simplify_replace_fn_rtx (x, NULL_RTX, loc_equivalence_callback,
467 NULL_RTX);
468}
469
55a2c322
VM
470/* If we have decided to substitute X with another value, return that
471 value, otherwise return X. */
472static rtx
8d49e7ef 473get_equiv (rtx x)
55a2c322
VM
474{
475 int regno;
476 rtx res;
477
478 if (! REG_P (x) || (regno = REGNO (x)) < FIRST_PSEUDO_REGISTER
479 || ! ira_reg_equiv[regno].defined_p
480 || ! ira_reg_equiv[regno].profitable_p
481 || lra_get_regno_hard_regno (regno) >= 0)
482 return x;
483 if ((res = ira_reg_equiv[regno].memory) != NULL_RTX)
d6220b11
KK
484 {
485 if (targetm.cannot_substitute_mem_equiv_p (res))
486 return x;
487 return res;
488 }
55a2c322
VM
489 if ((res = ira_reg_equiv[regno].constant) != NULL_RTX)
490 return res;
491 if ((res = ira_reg_equiv[regno].invariant) != NULL_RTX)
492 return res;
493 gcc_unreachable ();
494}
495
8d49e7ef
VM
496/* If we have decided to substitute X with the equivalent value,
497 return that value after elimination for INSN, otherwise return
498 X. */
499static rtx
cfa434f6 500get_equiv_with_elimination (rtx x, rtx_insn *insn)
8d49e7ef
VM
501{
502 rtx res = get_equiv (x);
503
504 if (x == res || CONSTANT_P (res))
505 return res;
d9cf932c 506 return lra_eliminate_regs_1 (insn, res, GET_MODE (res),
a6af1bf9 507 false, false, 0, true);
8d49e7ef
VM
508}
509
55a2c322
VM
510/* Set up curr_operand_mode. */
511static void
512init_curr_operand_mode (void)
513{
514 int nop = curr_static_id->n_operands;
515 for (int i = 0; i < nop; i++)
516 {
ef4bddc2 517 machine_mode mode = GET_MODE (*curr_id->operand_loc[i]);
55a2c322
VM
518 if (mode == VOIDmode)
519 {
520 /* The .md mode for address operands is the mode of the
521 addressed value rather than the mode of the address itself. */
522 if (curr_id->icode >= 0 && curr_static_id->operand[i].is_address)
523 mode = Pmode;
524 else
525 mode = curr_static_id->operand[i].mode;
526 }
527 curr_operand_mode[i] = mode;
528 }
529}
530
531\f
532
/* The page contains code to reuse input reloads.  */

/* Structure describing one input reload of the current insn, used to
   detect identical inputs and reuse the same reload pseudo.  */
struct input_reload
{
  /* True for input reload of matched operands.  */
  bool match_p;
  /* Reloaded value (the original rtx being reloaded).  */
  rtx input;
  /* Reload pseudo used to hold the reloaded value.  */
  rtx reg;
};
545
/* The number of elements in the following array.  */
static int curr_insn_input_reloads_num;
/* Array containing info about input reloads.  It is used to find the
   same input reload and reuse the reload pseudo in this case.  */
static struct input_reload curr_insn_input_reloads[LRA_MAX_INSN_RELOADS];

/* Initiate data concerning reuse of input reloads for the current
   insn.  Called before the insn's reloads are generated.  */
static void
init_curr_insn_input_reloads (void)
{
  curr_insn_input_reloads_num = 0;
}
559
/* Create a new pseudo using MODE, RCLASS, ORIGINAL or reuse an already
   created input reload pseudo (only if TYPE is not OP_OUT).  Don't
   reuse a pseudo if IN_SUBREG_P is true and the reused pseudo should be
   wrapped up in a SUBREG.  The result pseudo is returned through
   RESULT_REG.  Return TRUE if we created a new pseudo, FALSE if we
   reused the already created input reload pseudo.  Use TITLE to
   describe new registers for debug purposes.  */
static bool
get_reload_reg (enum op_type type, machine_mode mode, rtx original,
		enum reg_class rclass, bool in_subreg_p,
		const char *title, rtx *result_reg)
{
  int i, regno;
  enum reg_class new_class;
  bool unique_p = false;

  /* Output reloads never share pseudos.  */
  if (type == OP_OUT)
    {
      *result_reg
	= lra_create_new_reg_with_unique_value (mode, original, rclass, title);
      return true;
    }
  /* Prevent reuse value of expression with side effects,
     e.g. volatile memory.  */
  if (! side_effects_p (original))
    for (i = 0; i < curr_insn_input_reloads_num; i++)
      {
	if (! curr_insn_input_reloads[i].match_p
	    && rtx_equal_p (curr_insn_input_reloads[i].input, original)
	    && in_class_p (curr_insn_input_reloads[i].reg, rclass, &new_class))
	  {
	    rtx reg = curr_insn_input_reloads[i].reg;
	    regno = REGNO (reg);
	    /* If input is equal to original and both are VOIDmode,
	       GET_MODE (reg) might be still different from mode.
	       Ensure we don't return *result_reg with wrong mode.  */
	    if (GET_MODE (reg) != mode)
	      {
		if (in_subreg_p)
		  continue;
		/* A narrower reused pseudo cannot represent the value.  */
		if (maybe_lt (GET_MODE_SIZE (GET_MODE (reg)),
			      GET_MODE_SIZE (mode)))
		  continue;
		reg = lowpart_subreg (mode, reg, GET_MODE (reg));
		if (reg == NULL_RTX || GET_CODE (reg) != SUBREG)
		  continue;
	      }
	    *result_reg = reg;
	    if (lra_dump_file != NULL)
	      {
		fprintf (lra_dump_file, "	 Reuse r%d for reload ", regno);
		dump_value_slim (lra_dump_file, original, 1);
	      }
	    /* Narrow the reused pseudo's class if reuse requires it.  */
	    if (new_class != lra_get_allocno_class (regno))
	      lra_change_class (regno, new_class, ", change to", false);
	    if (lra_dump_file != NULL)
	      fprintf (lra_dump_file, "\n");
	    return false;
	  }
	/* If we have an input reload with a different mode, make sure it
	   will get a different hard reg.  */
	else if (REG_P (original)
		 && REG_P (curr_insn_input_reloads[i].input)
		 && REGNO (original) == REGNO (curr_insn_input_reloads[i].input)
		 && (GET_MODE (original)
		     != GET_MODE (curr_insn_input_reloads[i].input)))
	  unique_p = true;
      }
  *result_reg = (unique_p
		 ? lra_create_new_reg_with_unique_value
		 : lra_create_new_reg) (mode, original, rclass, title);
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  /* Record the new reload so later inputs of this insn can reuse it.  */
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = original;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = false;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = *result_reg;
  return true;
}
637
638\f
55a2c322
VM
639/* The page contains major code to choose the current insn alternative
640 and generate reloads for it. */
641
642/* Return the offset from REGNO of the least significant register
643 in (reg:MODE REGNO).
644
645 This function is used to tell whether two registers satisfy
646 a matching constraint. (reg:MODE1 REGNO1) matches (reg:MODE2 REGNO2) if:
647
648 REGNO1 + lra_constraint_offset (REGNO1, MODE1)
649 == REGNO2 + lra_constraint_offset (REGNO2, MODE2) */
650int
ef4bddc2 651lra_constraint_offset (int regno, machine_mode mode)
55a2c322
VM
652{
653 lra_assert (regno < FIRST_PSEUDO_REGISTER);
b0567726
RS
654
655 scalar_int_mode int_mode;
656 if (WORDS_BIG_ENDIAN
657 && is_a <scalar_int_mode> (mode, &int_mode)
658 && GET_MODE_SIZE (int_mode) > UNITS_PER_WORD)
ad474626 659 return hard_regno_nregs (regno, mode) - 1;
55a2c322
VM
660 return 0;
661}
662
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   auto-increment and auto-decrement.  This is specifically intended for
   process_alt_operands to use in determining whether two operands
   match.  X is the operand whose number is the lower of the two.

   It is supposed that X is the output operand and Y is the input
   operand.  Y_HARD_REGNO is the final hard regno of register Y or
   register in subreg Y as we know it now.  Otherwise, it is a
   negative value.  */
static bool
operands_match_p (rtx x, rtx y, int y_hard_regno)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return true;
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y)))))
    {
      int j;

      /* Both are (subregs of) registers: compare hard regnos,
	 adjusting each by the offset of its least significant
	 register.  */
      i = get_hard_regno (x, false);
      if (i < 0)
	goto slow;

      if ((j = y_hard_regno) < 0)
	goto slow;

      i += lra_constraint_offset (i, GET_MODE (x));
      j += lra_constraint_offset (j, GET_MODE (y));

      return i == j;
    }

  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two post-increments are invalid
     because the assembler insn would increment only once.  On the
     other hand, a post-increment matches ordinary indexing if the
     post-increment is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y, y_hard_regno);

  /* Two pre-increments are invalid because the assembler insn would
     increment only once.  On the other hand, a pre-increment matches
     ordinary indexing if the pre-increment is the input operand.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0), -1);

 slow:
  /* Structural comparison: reached when at least one operand is not
     (known to be) a hard register.  */
  if (code == REG && REG_P (y))
    return REGNO (x) == REGNO (y);

  if (code == REG && GET_CODE (y) == SUBREG && REG_P (SUBREG_REG (y))
      && x == SUBREG_REG (y))
    return true;
  if (GET_CODE (y) == REG && code == SUBREG && REG_P (SUBREG_REG (x))
      && SUBREG_REG (x) == y)
    return true;

  /* Now we have disposed of all the cases in which different rtx
     codes can match.  */
  if (code != GET_CODE (y))
    return false;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return false;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      return false;

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements fail
     to match, return false for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return false;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return false;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return false;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i), -1);
	  if (val == 0)
	    return false;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return false;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j), -1);
	      if (val == 0)
		return false;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's, except for
	     within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return true;
}
803
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  HIGH
   expressions and variable-sized values are excluded, as are
   constants the target refuses to put in memory.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && GET_MODE_SIZE (MODE).is_constant ()	\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */
#define SMALL_REGISTER_CLASS_P(C)		\
  (ira_class_hard_regs_num [(C)] == 1		\
   || (ira_class_hard_regs_num [(C)] >= 1	\
       && targetm.class_likely_spilled_p (C)))
55a2c322
VM
819
820/* If REG is a reload pseudo, try to make its class satisfying CL. */
821static void
822narrow_reload_pseudo_class (rtx reg, enum reg_class cl)
823{
824 enum reg_class rclass;
825
826 /* Do not make more accurate class from reloads generated. They are
827 mostly moves with a lot of constraints. Making more accurate
828 class may results in very narrow class and impossibility of find
829 registers for several reloads of one insn. */
830 if (INSN_UID (curr_insn) >= new_insn_uid_start)
831 return;
832 if (GET_CODE (reg) == SUBREG)
833 reg = SUBREG_REG (reg);
834 if (! REG_P (reg) || (int) REGNO (reg) < new_regno_start)
835 return;
836 if (in_class_p (reg, cl, &rclass) && rclass != cl)
a2d0d374 837 lra_change_class (REGNO (reg), rclass, " Change to", true);
55a2c322
VM
838}
839
4be9717c
VM
840/* Searches X for any reference to a reg with the same value as REGNO,
841 returning the rtx of the reference found if any. Otherwise,
842 returns NULL_RTX. */
843static rtx
844regno_val_use_in (unsigned int regno, rtx x)
845{
846 const char *fmt;
847 int i, j;
848 rtx tem;
849
850 if (REG_P (x) && lra_reg_info[REGNO (x)].val == lra_reg_info[regno].val)
851 return x;
852
853 fmt = GET_RTX_FORMAT (GET_CODE (x));
854 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
855 {
856 if (fmt[i] == 'e')
857 {
858 if ((tem = regno_val_use_in (regno, XEXP (x, i))))
859 return tem;
860 }
861 else if (fmt[i] == 'E')
862 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
863 if ((tem = regno_val_use_in (regno , XVECEXP (x, i, j))))
864 return tem;
865 }
866
867 return NULL_RTX;
868}
869
d8321b33
VM
870/* Return true if all current insn non-output operands except INS (it
871 has a negaitve end marker) do not use pseudos with the same value
872 as REGNO. */
873static bool
874check_conflict_input_operands (int regno, signed char *ins)
875{
876 int in;
877 int n_operands = curr_static_id->n_operands;
878
879 for (int nop = 0; nop < n_operands; nop++)
880 if (! curr_static_id->operand[nop].is_operator
881 && curr_static_id->operand[nop].type != OP_OUT)
882 {
883 for (int i = 0; (in = ins[i]) >= 0; i++)
884 if (in == nop)
885 break;
886 if (in < 0
887 && regno_val_use_in (regno, *curr_id->operand_loc[nop]) != NULL_RTX)
888 return false;
889 }
890 return true;
891}
892
/* Generate reloads for matching OUT and INS (array of input operand
   numbers with end marker -1) with reg class GOAL_CLASS, considering
   output operands OUTS (similar array to INS) needing to be in different
   registers.  Add input and output reloads correspondingly to the lists
   *BEFORE and *AFTER.  OUT might be negative.  In this case we generate
   input reloads for matched input operands INS.  EARLY_CLOBBER_P is a flag
   that the output operand is early clobbered for the chosen
   alternative.  */
static void
match_reload (signed char out, signed char *ins, signed char *outs,
	      enum reg_class goal_class, rtx_insn **before,
	      rtx_insn **after, bool early_clobber_p)
{
  bool out_conflict;
  int i, in;
  rtx new_in_reg, new_out_reg, reg;
  machine_mode inmode, outmode;
  rtx in_rtx = *curr_id->operand_loc[ins[0]];
  rtx out_rtx = out < 0 ? in_rtx : *curr_id->operand_loc[out];

  inmode = curr_operand_mode[ins[0]];
  outmode = out < 0 ? inmode : curr_operand_mode[out];
  /* Input reload insns are accumulated on the *BEFORE list.  */
  push_to_sequence (*before);
  if (inmode != outmode)
    {
      /* The matched operands have different modes: create one reload
	 pseudo in the wider mode and access the other operand through
	 a lowpart SUBREG of it.  process_alt_operands has already
	 checked that the mode sizes are ordered.  */
      if (partial_subreg_p (outmode, inmode))
	{
	  /* Input mode is wider: reload pseudo gets INMODE, output is
	     a lowpart subreg of it.  */
	  reg = new_in_reg
	    = lra_create_new_reg_with_unique_value (inmode, in_rtx,
						    goal_class, "");
	  new_out_reg = gen_lowpart_SUBREG (outmode, reg);
	  LRA_SUBREG_P (new_out_reg) = 1;
	  /* If the input reg is dying here, we can use the same hard
	     register for REG and IN_RTX.  We do it only for original
	     pseudos as reload pseudos can die although original
	     pseudos still live where reload pseudos dies.  */
	  if (REG_P (in_rtx) && (int) REGNO (in_rtx) < lra_new_regno_start
	      && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	      && (!early_clobber_p
		  || check_conflict_input_operands(REGNO (in_rtx), ins)))
	    lra_assign_reg_val (REGNO (in_rtx), REGNO (reg));
	}
      else
	{
	  /* Output mode is at least as wide: reload pseudo gets
	     OUTMODE, input is a lowpart subreg of it.  */
	  reg = new_out_reg
	    = lra_create_new_reg_with_unique_value (outmode, out_rtx,
						    goal_class, "");
	  new_in_reg = gen_lowpart_SUBREG (inmode, reg);
	  /* NEW_IN_REG is non-paradoxical subreg.  We don't want
	     NEW_OUT_REG living above.  We add clobber clause for
	     this.  This is just a temporary clobber.  We can remove
	     it at the end of LRA work.  */
	  rtx_insn *clobber = emit_clobber (new_out_reg);
	  LRA_TEMP_CLOBBER_P (PATTERN (clobber)) = 1;
	  LRA_SUBREG_P (new_in_reg) = 1;
	  if (GET_CODE (in_rtx) == SUBREG)
	    {
	      rtx subreg_reg = SUBREG_REG (in_rtx);

	      /* If SUBREG_REG is dying here and sub-registers IN_RTX
		 and NEW_IN_REG are similar, we can use the same hard
		 register for REG and SUBREG_REG.  */
	      if (REG_P (subreg_reg)
		  && (int) REGNO (subreg_reg) < lra_new_regno_start
		  && GET_MODE (subreg_reg) == outmode
		  && known_eq (SUBREG_BYTE (in_rtx), SUBREG_BYTE (new_in_reg))
		  && find_regno_note (curr_insn, REG_DEAD, REGNO (subreg_reg))
		  && (! early_clobber_p
		      || check_conflict_input_operands (REGNO (subreg_reg),
							ins)))
		lra_assign_reg_val (REGNO (subreg_reg), REGNO (reg));
	    }
	}
    }
  else
    {
      /* Pseudos have values -- see comments for lra_reg_info.
	 Different pseudos with the same value do not conflict even if
	 they live in the same place.  When we create a pseudo we
	 assign value of original pseudo (if any) from which we
	 created the new pseudo.  If we create the pseudo from the
	 input pseudo, the new pseudo will have no conflict with the
	 input pseudo which is wrong when the input pseudo lives after
	 the insn and as the new pseudo value is changed by the insn
	 output.  Therefore we create the new pseudo from the output
	 except the case when we have single matched dying input
	 pseudo.

	 We cannot reuse the current output register because we might
	 have a situation like "a <- a op b", where the constraints
	 force the second input operand ("b") to match the output
	 operand ("a").  "b" must then be copied into a new register
	 so that it doesn't clobber the current value of "a".

	 We cannot use the same value if the output pseudo is
	 early clobbered or the input pseudo is mentioned in the
	 output, e.g. as an address part in memory, because
	 output reload will actually extend the pseudo liveness.
	 We don't care about eliminable hard regs here as we are
	 interesting only in pseudos.  */

      /* Matching input's register value is the same as one of the other
	 output operand.  Output operands in a parallel insn must be in
	 different registers.  */
      out_conflict = false;
      if (REG_P (in_rtx))
	{
	  for (i = 0; outs[i] >= 0; i++)
	    {
	      rtx other_out_rtx = *curr_id->operand_loc[outs[i]];
	      if (REG_P (other_out_rtx)
		  && (regno_val_use_in (REGNO (in_rtx), other_out_rtx)
		      != NULL_RTX))
		{
		  out_conflict = true;
		  break;
		}
	    }
	}

      /* Reuse the input pseudo's value only in the safe single-dying-
	 input case described above; otherwise take a unique value
	 derived from the output.  */
      new_in_reg = new_out_reg
	= (! early_clobber_p && ins[1] < 0 && REG_P (in_rtx)
	   && (int) REGNO (in_rtx) < lra_new_regno_start
	   && find_regno_note (curr_insn, REG_DEAD, REGNO (in_rtx))
	   && (! early_clobber_p
	       || check_conflict_input_operands (REGNO (in_rtx), ins))
	   && (out < 0
	       || regno_val_use_in (REGNO (in_rtx), out_rtx) == NULL_RTX)
	   && !out_conflict
	   ? lra_create_new_reg (inmode, in_rtx, goal_class, "")
	   : lra_create_new_reg_with_unique_value (outmode, out_rtx,
						   goal_class, ""));
    }
  /* In operand can be got from transformations before processing insn
     constraints.  One example of such transformations is subreg
     reloading (see function simplify_operand_subreg).  The new
     pseudos created by the transformations might have inaccurate
     class (ALL_REGS) and we should make their classes more
     accurate.  */
  narrow_reload_pseudo_class (in_rtx, goal_class);
  lra_emit_move (copy_rtx (new_in_reg), in_rtx);
  *before = get_insns ();
  end_sequence ();
  /* Add the new pseudo to consider values of subsequent input reload
     pseudos.  */
  lra_assert (curr_insn_input_reloads_num < LRA_MAX_INSN_RELOADS);
  curr_insn_input_reloads[curr_insn_input_reloads_num].input = in_rtx;
  curr_insn_input_reloads[curr_insn_input_reloads_num].match_p = true;
  curr_insn_input_reloads[curr_insn_input_reloads_num++].reg = new_in_reg;
  /* Substitute the new pseudo into all the matched input operands.  */
  for (i = 0; (in = ins[i]) >= 0; i++)
    {
      lra_assert
	(GET_MODE (*curr_id->operand_loc[in]) == VOIDmode
	 || GET_MODE (new_in_reg) == GET_MODE (*curr_id->operand_loc[in]));
      *curr_id->operand_loc[in] = new_in_reg;
    }
  lra_update_dups (curr_id, ins);
  if (out < 0)
    return;
  /* See a comment for the input operand above.  */
  narrow_reload_pseudo_class (out_rtx, goal_class);
  /* Emit the output reload only if the output is actually used.  */
  if (find_reg_note (curr_insn, REG_UNUSED, out_rtx) == NULL_RTX)
    {
      start_sequence ();
      lra_emit_move (out_rtx, copy_rtx (new_out_reg));
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  *curr_id->operand_loc[out] = new_out_reg;
  lra_update_dup (curr_id, out);
}
1066
1067/* Return register class which is union of all reg classes in insn
1068 constraint alternative string starting with P. */
1069static enum reg_class
1070reg_class_from_constraints (const char *p)
1071{
1072 int c, len;
1073 enum reg_class op_class = NO_REGS;
1074
1075 do
1076 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
1077 {
1078 case '#':
1079 case ',':
1080 return op_class;
1081
55a2c322 1082 case 'g':
55a2c322
VM
1083 op_class = reg_class_subunion[op_class][GENERAL_REGS];
1084 break;
f4eafc30 1085
55a2c322 1086 default:
777e635f
RS
1087 enum constraint_num cn = lookup_constraint (p);
1088 enum reg_class cl = reg_class_for_constraint (cn);
1089 if (cl == NO_REGS)
55a2c322 1090 {
777e635f 1091 if (insn_extra_address_constraint (cn))
55a2c322
VM
1092 op_class
1093 = (reg_class_subunion
1094 [op_class][base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
1095 ADDRESS, SCRATCH)]);
55a2c322
VM
1096 break;
1097 }
f4eafc30 1098
777e635f
RS
1099 op_class = reg_class_subunion[op_class][cl];
1100 break;
55a2c322
VM
1101 }
1102 while ((p += len), c);
1103 return op_class;
1104}
1105
1106/* If OP is a register, return the class of the register as per
1107 get_reg_class, otherwise return NO_REGS. */
1108static inline enum reg_class
1109get_op_class (rtx op)
1110{
1111 return REG_P (op) ? get_reg_class (REGNO (op)) : NO_REGS;
1112}
1113
1114/* Return generated insn mem_pseudo:=val if TO_P or val:=mem_pseudo
1115 otherwise. If modes of MEM_PSEUDO and VAL are different, use
1116 SUBREG for VAL to make them equal. */
cfa434f6 1117static rtx_insn *
55a2c322
VM
1118emit_spill_move (bool to_p, rtx mem_pseudo, rtx val)
1119{
1120 if (GET_MODE (mem_pseudo) != GET_MODE (val))
2c62cbaa 1121 {
cb1cca12
VM
1122 /* Usually size of mem_pseudo is greater than val size but in
1123 rare cases it can be less as it can be defined by target
1124 dependent macro HARD_REGNO_CALLER_SAVE_MODE. */
1ccd4874
VM
1125 if (! MEM_P (val))
1126 {
54b84aa9
EB
1127 val = gen_lowpart_SUBREG (GET_MODE (mem_pseudo),
1128 GET_CODE (val) == SUBREG
1129 ? SUBREG_REG (val) : val);
1ccd4874
VM
1130 LRA_SUBREG_P (val) = 1;
1131 }
1132 else
1133 {
1134 mem_pseudo = gen_lowpart_SUBREG (GET_MODE (val), mem_pseudo);
1135 LRA_SUBREG_P (mem_pseudo) = 1;
1136 }
2c62cbaa 1137 }
1476d1bd
MM
1138 return to_p ? gen_move_insn (mem_pseudo, val)
1139 : gen_move_insn (val, mem_pseudo);
55a2c322
VM
1140}
1141
/* Process a special case insn (register move), return true if we
   don't need to process it anymore.  INSN should be a single set
   insn.  Set up that RTL was changed through CHANGE_P and that hook
   TARGET_SECONDARY_MEMORY_NEEDED says to use secondary memory through
   SEC_MEM_P.  */
static bool
check_and_process_move (bool *change_p, bool *sec_mem_p ATTRIBUTE_UNUSED)
{
  int sregno, dregno;
  rtx dest, src, dreg, sreg, new_reg, scratch_reg;
  rtx_insn *before;
  enum reg_class dclass, sclass, secondary_class;
  secondary_reload_info sri;

  lra_assert (curr_insn_set != NULL_RTX);
  dreg = dest = SET_DEST (curr_insn_set);
  sreg = src = SET_SRC (curr_insn_set);
  /* Look through SUBREGs to the underlying reg/mem.  */
  if (GET_CODE (dest) == SUBREG)
    dreg = SUBREG_REG (dest);
  if (GET_CODE (src) == SUBREG)
    sreg = SUBREG_REG (src);
  if (! (REG_P (dreg) || MEM_P (dreg)) || ! (REG_P (sreg) || MEM_P (sreg)))
    return false;
  sclass = dclass = NO_REGS;
  if (REG_P (dreg))
    dclass = get_reg_class (REGNO (dreg));
  gcc_assert (dclass < LIM_REG_CLASSES);
  if (dclass == ALL_REGS)
    /* ALL_REGS is used for new pseudos created by transformations
       like reload of SUBREG_REG (see function
       simplify_operand_subreg).  We don't know their class yet.  We
       should figure out the class from processing the insn
       constraints not in this fast path function.  Even if ALL_REGS
       were a right class for the pseudo, secondary_... hooks usually
       are not defined for ALL_REGS.  */
    return false;
  if (REG_P (sreg))
    sclass = get_reg_class (REGNO (sreg));
  gcc_assert (sclass < LIM_REG_CLASSES);
  if (sclass == ALL_REGS)
    /* See comments above.  */
    return false;
  if (sclass == NO_REGS && dclass == NO_REGS)
    return false;
  /* Does the target require going through memory for this move?  */
  if (targetm.secondary_memory_needed (GET_MODE (src), sclass, dclass)
      && ((sclass != NO_REGS && dclass != NO_REGS)
	  || (GET_MODE (src)
	      != targetm.secondary_memory_needed_mode (GET_MODE (src)))))
    {
      *sec_mem_p = true;
      return false;
    }
  if (! REG_P (dreg) || ! REG_P (sreg))
    return false;
  sri.prev_sri = NULL;
  sri.icode = CODE_FOR_nothing;
  sri.extra_cost = 0;
  secondary_class = NO_REGS;
  /* Set up hard register for a reload pseudo for hook
     secondary_reload because some targets just ignore unassigned
     pseudos in the hook.  The temporary assignments are undone
     below after the hook has been queried.  */
  if (dclass != NO_REGS && lra_get_regno_hard_regno (REGNO (dreg)) < 0)
    {
      dregno = REGNO (dreg);
      reg_renumber[dregno] = ira_class_hard_regs[dclass][0];
    }
  else
    dregno = -1;
  if (sclass != NO_REGS && lra_get_regno_hard_regno (REGNO (sreg)) < 0)
    {
      sregno = REGNO (sreg);
      reg_renumber[sregno] = ira_class_hard_regs[sclass][0];
    }
  else
    sregno = -1;
  /* Ask the target about an input reload (reading SRC's class).  */
  if (sclass != NO_REGS)
    secondary_class
      = (enum reg_class) targetm.secondary_reload (false, dest,
						   (reg_class_t) sclass,
						   GET_MODE (src), &sri);
  /* And about an output reload (writing DEST's class), if needed.  */
  if (sclass == NO_REGS
      || ((secondary_class != NO_REGS || sri.icode != CODE_FOR_nothing)
	  && dclass != NO_REGS))
    {
      enum reg_class old_sclass = secondary_class;
      secondary_reload_info old_sri = sri;

      sri.prev_sri = NULL;
      sri.icode = CODE_FOR_nothing;
      sri.extra_cost = 0;
      secondary_class
	= (enum reg_class) targetm.secondary_reload (true, src,
						     (reg_class_t) dclass,
						     GET_MODE (src), &sri);
      /* Check the target hook consistency.  */
      lra_assert
	((secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
	 || (old_sclass == NO_REGS && old_sri.icode == CODE_FOR_nothing)
	 || (secondary_class == old_sclass && sri.icode == old_sri.icode));
    }
  /* Undo the temporary hard register assignments made above.  */
  if (sregno >= 0)
    reg_renumber [sregno] = -1;
  if (dregno >= 0)
    reg_renumber [dregno] = -1;
  if (secondary_class == NO_REGS && sri.icode == CODE_FOR_nothing)
    return false;
  *change_p = true;
  new_reg = NULL_RTX;
  if (secondary_class != NO_REGS)
    new_reg = lra_create_new_reg_with_unique_value (GET_MODE (src), NULL_RTX,
						    secondary_class,
						    "secondary");
  start_sequence ();
  if (sri.icode == CODE_FOR_nothing)
    lra_emit_move (new_reg, src);
  else
    {
      /* The target supplied an insn pattern; its operand 2 is a
	 scratch whose class we derive from the pattern's constraint.  */
      enum reg_class scratch_class;

      scratch_class = (reg_class_from_constraints
		       (insn_data[sri.icode].operand[2].constraint));
      scratch_reg = (lra_create_new_reg_with_unique_value
		     (insn_data[sri.icode].operand[2].mode, NULL_RTX,
		      scratch_class, "scratch"));
      emit_insn (GEN_FCN (sri.icode) (new_reg != NULL_RTX ? new_reg : dest,
				      src, scratch_reg));
    }
  before = get_insns ();
  end_sequence ();
  lra_process_new_insns (curr_insn, before, NULL, "Inserting the move");
  if (new_reg != NULL_RTX)
    SET_SRC (curr_insn_set) = new_reg;
  else
    {
      /* The whole move was emitted above; the original insn is now
	 redundant.  */
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file, "Deleting move %u\n", INSN_UID (curr_insn));
	  dump_insn_slim (lra_dump_file, curr_insn);
	}
      lra_set_insn_deleted (curr_insn);
      return true;
    }
  return false;
}
1286
/* The following data describe the result of process_alt_operands.
   The data are used in curr_insn_transform to generate reloads.  */

/* The chosen reg classes which should be used for the corresponding
   operands.  */
static enum reg_class goal_alt[MAX_RECOG_OPERANDS];
/* True if the operand should be the same as another operand and that
   other operand does not need a reload.  */
static bool goal_alt_match_win[MAX_RECOG_OPERANDS];
/* True if the operand does not need a reload.  */
static bool goal_alt_win[MAX_RECOG_OPERANDS];
/* True if the operand can be offsetable memory.  */
static bool goal_alt_offmemok[MAX_RECOG_OPERANDS];
/* The number of an operand to which given operand can be matched to.  */
static int goal_alt_matches[MAX_RECOG_OPERANDS];
/* The number of elements in the following array.  */
static int goal_alt_dont_inherit_ops_num;
/* Numbers of operands whose reload pseudos should not be inherited.  */
static int goal_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
/* True if the insn commutative operands should be swapped.  */
static bool goal_alt_swapped;
/* The chosen insn alternative.  */
static int goal_alt_number;

/* True if the corresponding operand is the result of an equivalence
   substitution.  */
static bool equiv_substition_p[MAX_RECOG_OPERANDS];

/* The following variables are used to choose the best insn
   alternative.  They reflect final characteristics of the best
   alternative.  */

/* Number of necessary reloads and overall cost reflecting the
   previous value and other unpleasantness of the best alternative.  */
static int best_losers, best_overall;
/* Overall number of hard registers used for reloads.  For example, on
   some targets we need 2 general registers to reload DFmode and only
   one floating point register.  */
static int best_reload_nregs;
/* Overall number reflecting distances of previous reloadings of the
   same value.  The distances are counted from the current BB start.
   It is used to improve inheritance chances.  */
static int best_reload_sum;

/* True if the current insn should have no corresponding input or
   output reloads.  */
static bool no_input_reloads_p, no_output_reloads_p;

/* True if we swapped the commutative operands in the current
   insn.  */
static int curr_swapped;
/* If CHECK_ONLY_P is false, arrange for address element *LOC to be a
   register of class CL.  Add any input reloads to list BEFORE.  AFTER
   is nonnull if *LOC is an automodified value; handle that case by
   adding the required output reloads to list AFTER.  Return true if
   the RTL was changed.

   If CHECK_ONLY_P is true, check that the *LOC is a correct address
   register.  Return false if the address register is correct.  */
static bool
process_addr_reg (rtx *loc, bool check_only_p, rtx_insn **before, rtx_insn **after,
		  enum reg_class cl)
{
  int regno;
  enum reg_class rclass, new_class;
  rtx reg;
  rtx new_reg;
  machine_mode mode;
  bool subreg_p, before_p = false;

  subreg_p = GET_CODE (*loc) == SUBREG;
  if (subreg_p)
    {
      reg = SUBREG_REG (*loc);
      mode = GET_MODE (reg);

      /* For mode with size bigger than ptr_mode, there unlikely to be "mov"
	 between two registers with different classes, but there normally will
	 be "mov" which transfers element of vector register into the general
	 register, and this normally will be a subreg which should be reloaded
	 as a whole.  This is particularly likely to be triggered when
	 -fno-split-wide-types specified.  */
      if (!REG_P (reg)
	  || in_class_p (reg, cl, &new_class)
	  || known_le (GET_MODE_SIZE (mode), GET_MODE_SIZE (ptr_mode)))
	/* Reload only the inner reg of the subreg.  */
	loc = &SUBREG_REG (*loc);
    }

  reg = *loc;
  mode = GET_MODE (reg);
  if (! REG_P (reg))
    {
      if (check_only_p)
	return true;
      /* Always reload memory in an address even if the target supports
	 such addresses.  */
      new_reg = lra_create_new_reg_with_unique_value (mode, reg, cl, "address");
      before_p = true;
    }
  else
    {
      regno = REGNO (reg);
      rclass = get_reg_class (regno);
      /* Substitute an equivalent value for the pseudo if one exists.  */
      if (! check_only_p
	  && (*loc = get_equiv_with_elimination (reg, curr_insn)) != reg)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "Changing pseudo %d in address of insn %u on equiv ",
		       REGNO (reg), INSN_UID (curr_insn));
	      dump_value_slim (lra_dump_file, *loc, 1);
	      fprintf (lra_dump_file, "\n");
	    }
	  *loc = copy_rtx (*loc);
	}
      if (*loc != reg || ! in_class_p (reg, cl, &new_class))
	{
	  /* The register is of the wrong class (or was replaced by an
	     equivalence): reload it.  */
	  if (check_only_p)
	    return true;
	  reg = *loc;
	  if (get_reload_reg (after == NULL ? OP_IN : OP_INOUT,
			      mode, reg, cl, subreg_p, "address", &new_reg))
	    before_p = true;
	}
      else if (new_class != NO_REGS && rclass != new_class)
	{
	  /* Narrowing the pseudo's class is enough; no reload insn
	     is needed.  */
	  if (check_only_p)
	    return true;
	  lra_change_class (regno, new_class, " Change to", true);
	  return false;
	}
      else
	return false;
    }
  if (before_p)
    {
      /* Emit the input reload: copy the old value into the new reg.  */
      push_to_sequence (*before);
      lra_emit_move (new_reg, reg);
      *before = get_insns ();
      end_sequence ();
    }
  *loc = new_reg;
  if (after != NULL)
    {
      /* Automodified address: copy the (possibly modified) value back
	 after the insn.  */
      start_sequence ();
      lra_emit_move (before_p ? copy_rtx (reg) : reg, new_reg);
      emit_insn (*after);
      *after = get_insns ();
      end_sequence ();
    }
  return true;
}
1441
4f0bee4c
WM
1442/* Insert move insn in simplify_operand_subreg. BEFORE returns
1443 the insn to be inserted before curr insn. AFTER returns the
1444 the insn to be inserted after curr insn. ORIGREG and NEWREG
1445 are the original reg and new reg for reload. */
1446static void
cfa434f6
DM
1447insert_move_for_subreg (rtx_insn **before, rtx_insn **after, rtx origreg,
1448 rtx newreg)
4f0bee4c
WM
1449{
1450 if (before)
1451 {
1452 push_to_sequence (*before);
1453 lra_emit_move (newreg, origreg);
1454 *before = get_insns ();
1455 end_sequence ();
1456 }
1457 if (after)
1458 {
1459 start_sequence ();
1460 lra_emit_move (origreg, newreg);
1461 emit_insn (*after);
1462 *after = get_insns ();
1463 end_sequence ();
1464 }
1465}
1466
/* Forward declarations for functions used by simplify_operand_subreg
   before their definitions below.  */
static int valid_address_p (machine_mode mode, rtx addr, addr_space_t as);
static bool process_address (int, bool, rtx_insn **, rtx_insn **);
/* Make reloads for subreg in operand NOP with internal subreg mode
   REG_MODE, add new reloads for further processing.  Return true if
   any change was done.  */
static bool
simplify_operand_subreg (int nop, machine_mode reg_mode)
{
  int hard_regno;
  rtx_insn *before, *after;
  machine_mode mode, innermode;
  rtx reg, new_reg;
  rtx operand = *curr_id->operand_loc[nop];
  enum reg_class regclass;
  enum op_type type;

  before = after = NULL;

  if (GET_CODE (operand) != SUBREG)
    return false;

  mode = GET_MODE (operand);
  reg = SUBREG_REG (operand);
  innermode = GET_MODE (reg);
  type = curr_static_id->operand[nop].type;
  if (MEM_P (reg))
    {
      /* (subreg (mem ...)): try to fold the subreg into the memory
	 access, reloading the address or the memory if needed.  */
      const bool addr_was_valid
	= valid_address_p (innermode, XEXP (reg, 0), MEM_ADDR_SPACE (reg));
      alter_subreg (curr_id->operand_loc[nop], false);
      rtx subst = *curr_id->operand_loc[nop];
      lra_assert (MEM_P (subst));
      const bool addr_is_valid = valid_address_p (GET_MODE (subst),
						  XEXP (subst, 0),
						  MEM_ADDR_SPACE (subst));
      if (!addr_was_valid
	  || addr_is_valid
	  || ((get_constraint_type (lookup_constraint
				    (curr_static_id->operand[nop].constraint))
	       != CT_SPECIAL_MEMORY)
	      /* We still can reload address and if the address is
		 valid, we can remove subreg without reloading its
		 inner memory.  */
	      && valid_address_p (GET_MODE (subst),
				  regno_reg_rtx
				  [ira_class_hard_regs
				   [base_reg_class (GET_MODE (subst),
						    MEM_ADDR_SPACE (subst),
						    ADDRESS, SCRATCH)][0]],
				  MEM_ADDR_SPACE (subst))))
	{
	  /* If we change the address for a paradoxical subreg of memory, the
	     new address might violate the necessary alignment or the access
	     might be slow; take this into consideration.  We need not worry
	     about accesses beyond allocated memory for paradoxical memory
	     subregs as we don't substitute such equiv memory (see processing
	     equivalences in function lra_constraints) and because for spilled
	     pseudos we allocate stack memory enough for the biggest
	     corresponding paradoxical subreg.

	     However, do not blindly simplify a (subreg (mem ...)) for
	     WORD_REGISTER_OPERATIONS targets as this may lead to loading junk
	     data into a register when the inner is narrower than outer or
	     missing important data from memory when the inner is wider than
	     outer.  This rule only applies to modes that are no wider than
	     a word.

	     If valid memory becomes invalid after subreg elimination
	     and address might be different we still have to reload
	     memory.
	  */
	  if ((! addr_was_valid
	       || addr_is_valid
	       || known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (innermode)))
	      && !(maybe_ne (GET_MODE_PRECISION (mode),
			     GET_MODE_PRECISION (innermode))
		   && known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD)
		   && known_le (GET_MODE_SIZE (innermode), UNITS_PER_WORD)
		   && WORD_REGISTER_OPERATIONS)
	      && (!(MEM_ALIGN (subst) < GET_MODE_ALIGNMENT (mode)
		    && targetm.slow_unaligned_access (mode, MEM_ALIGN (subst)))
		  || (MEM_ALIGN (reg) < GET_MODE_ALIGNMENT (innermode)
		      && targetm.slow_unaligned_access (innermode,
							MEM_ALIGN (reg)))))
	    /* The folded memory access is acceptable; keep it.  */
	    return true;

	  /* Undo the substitution made by alter_subreg above.  */
	  *curr_id->operand_loc[nop] = operand;

	  /* But if the address was not valid, we cannot reload the MEM without
	     reloading the address first.  */
	  if (!addr_was_valid)
	    process_address (nop, false, &before, &after);

	  /* INNERMODE is fast, MODE slow.  Reload the mem in INNERMODE.  */
	  enum reg_class rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, innermode,
			      reg, rclass, TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = (type != OP_OUT
			       || partial_subreg_p (mode, innermode));
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  SUBREG_REG (operand) = new_reg;

	  /* Convert to MODE.  */
	  reg = operand;
	  rclass
	    = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);
	  if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			      rclass, TRUE, "slow/invalid mem", &new_reg))
	    {
	      bool insert_before, insert_after;
	      bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	      insert_before = type != OP_OUT;
	      insert_after = type != OP_IN;
	      insert_move_for_subreg (insert_before ? &before : NULL,
				      insert_after ? &after : NULL,
				      reg, new_reg);
	    }
	  *curr_id->operand_loc[nop] = new_reg;
	  lra_process_new_insns (curr_insn, before, after,
				 "Inserting slow/invalid mem reload");
	  return true;
	}

      /* If the address was valid and became invalid, prefer to reload
	 the memory.  Typical case is when the index scale should
	 correspond the memory.  */
      *curr_id->operand_loc[nop] = operand;
      /* Do not return false here as the MEM_P (reg) will be processed
	 later in this function.  */
    }
  else if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
    {
      /* Subreg of a hard register can always be simplified away.  */
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  else if (CONSTANT_P (reg))
    {
      /* Try to simplify subreg of constant.  It is usually result of
	 equivalence substitution.  */
      if (innermode == VOIDmode
	  && (innermode = original_subreg_reg_mode[nop]) == VOIDmode)
	innermode = curr_static_id->operand[nop].mode;
      if ((new_reg = simplify_subreg (mode, reg, innermode,
				      SUBREG_BYTE (operand))) != NULL_RTX)
	{
	  *curr_id->operand_loc[nop] = new_reg;
	  return true;
	}
    }
  /* Put constant into memory when we have mixed modes.  It generates
     a better code in most cases as it does not need a secondary
     reload memory.  It also prevents LRA looping when LRA is using
     secondary reload memory again and again.  */
  if (CONSTANT_P (reg) && CONST_POOL_OK_P (reg_mode, reg)
      && SCALAR_INT_MODE_P (reg_mode) != SCALAR_INT_MODE_P (mode))
    {
      SUBREG_REG (operand) = force_const_mem (reg_mode, reg);
      alter_subreg (curr_id->operand_loc[nop], false);
      return true;
    }
  /* Force a reload of the SUBREG_REG if this is a constant or PLUS or
     if there may be a problem accessing OPERAND in the outer
     mode.  */
  if ((REG_P (reg)
       && REGNO (reg) >= FIRST_PSEUDO_REGISTER
       && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
       /* Don't reload paradoxical subregs because we could be looping
	  having repeatedly final regno out of hard regs range.  */
       && (hard_regno_nregs (hard_regno, innermode)
	   >= hard_regno_nregs (hard_regno, mode))
       && simplify_subreg_regno (hard_regno, innermode,
				 SUBREG_BYTE (operand), mode) < 0
       /* Don't reload subreg for matching reload.  It is actually
	  valid subreg in LRA.  */
       && ! LRA_SUBREG_P (operand))
      || CONSTANT_P (reg) || GET_CODE (reg) == PLUS || MEM_P (reg))
    {
      enum reg_class rclass;

      if (REG_P (reg))
	/* There is a big probability that we will get the same class
	   for the new pseudo and we will get the same insn which
	   means infinite looping.  So spill the new pseudo.  */
	rclass = NO_REGS;
      else
	/* The class will be defined later in curr_insn_transform.  */
	rclass
	  = (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, reg_mode, reg,
			  rclass, TRUE, "subreg reg", &new_reg))
	{
	  bool insert_before, insert_after;
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT
			   || read_modify_subreg_p (operand));
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, new_reg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting subreg reload");
      return true;
    }
  /* Force a reload for a paradoxical subreg.  For paradoxical subreg,
     IRA allocates hardreg to the inner pseudo reg according to its mode
     instead of the outermode, so the size of the hardreg may not be enough
     to contain the outermode operand, in that case we may need to insert
     reload for the reg.  For the following two types of paradoxical subreg,
     we need to insert reload:
     1. If the op_type is OP_IN, and the hardreg could not be paired with
        other hardreg to contain the outermode operand
        (checked by in_hard_reg_set_p), we need to insert the reload.
     2. If the op_type is OP_OUT or OP_INOUT.

     Here is a paradoxical subreg example showing how the reload is generated:

       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:DI 107 [ __comp ]) 0)) {*movti_internal_rex64}

     In IRA, reg107 is allocated to a DImode hardreg.  We use x86-64 as example
     here, if reg107 is assigned to hardreg R15, because R15 is the last
     hardreg, compiler cannot find another hardreg to pair with R15 to
     contain TImode data.  So we insert a TImode reload reg180 for it.
     After reload is inserted:

       (insn 283 0 0 (set (subreg:DI (reg:TI 180 [orig:107 __comp ] [107]) 0)
	  (reg:DI 107 [ __comp ])) -1
       (insn 5 4 7 2 (set (reg:TI 106 [ __comp ])
	  (subreg:TI (reg:TI 180 [orig:107 __comp ] [107]) 0)) {*movti_internal_rex64}

     Two reload hard registers will be allocated to reg180 to save TImode data
     in LRA_assign.

     For LRA pseudos this should normally be handled by the biggest_mode
     mechanism.  However, it's possible for new uses of an LRA pseudo
     to be introduced after we've allocated it, such as when undoing
     inheritance, and the allocated register might not then be appropriate
     for the new uses.  */
  else if (REG_P (reg)
	   && REGNO (reg) >= FIRST_PSEUDO_REGISTER
	   && (hard_regno = lra_get_regno_hard_regno (REGNO (reg))) >= 0
	   && (hard_regno_nregs (hard_regno, innermode)
	       < hard_regno_nregs (hard_regno, mode))
	   && (regclass = lra_get_allocno_class (REGNO (reg)))
	   && (type != OP_IN
	       || !in_hard_reg_set_p (reg_class_contents[regclass],
				      mode, hard_regno)
	       || overlaps_hard_reg_set_p (lra_no_alloc_regs,
					   mode, hard_regno)))
    {
      /* The class will be defined later in curr_insn_transform.  */
      enum reg_class rclass
	= (enum reg_class) targetm.preferred_reload_class (reg, ALL_REGS);

      if (get_reload_reg (curr_static_id->operand[nop].type, mode, reg,
			  rclass, TRUE, "paradoxical subreg", &new_reg))
	{
	  rtx subreg;
	  bool insert_before, insert_after;

	  PUT_MODE (new_reg, mode);
	  subreg = gen_lowpart_SUBREG (innermode, new_reg);
	  bitmap_set_bit (&lra_subreg_reload_pseudos, REGNO (new_reg));

	  insert_before = (type != OP_OUT);
	  insert_after = (type != OP_IN);
	  insert_move_for_subreg (insert_before ? &before : NULL,
				  insert_after ? &after : NULL,
				  reg, subreg);
	}
      SUBREG_REG (operand) = new_reg;
      lra_process_new_insns (curr_insn, before, after,
			     "Inserting paradoxical subreg reload");
      return true;
    }
  return false;
}
1759
1760/* Return TRUE if X refers for a hard register from SET. */
1761static bool
1762uses_hard_regs_p (rtx x, HARD_REG_SET set)
1763{
1764 int i, j, x_hard_regno;
ef4bddc2 1765 machine_mode mode;
55a2c322
VM
1766 const char *fmt;
1767 enum rtx_code code;
1768
1769 if (x == NULL_RTX)
1770 return false;
1771 code = GET_CODE (x);
1772 mode = GET_MODE (x);
145d4e1a 1773
55a2c322
VM
1774 if (code == SUBREG)
1775 {
145d4e1a
AV
1776 /* For all SUBREGs we want to check whether the full multi-register
1777 overlaps the set. For normal SUBREGs this means 'get_hard_regno' of
1778 the inner register, for paradoxical SUBREGs this means the
1779 'get_hard_regno' of the full SUBREG and for complete SUBREGs either is
1780 fine. Use the wider mode for all cases. */
1781 rtx subreg = SUBREG_REG (x);
bd5a2c67 1782 mode = wider_subreg_mode (x);
145d4e1a
AV
1783 if (mode == GET_MODE (subreg))
1784 {
1785 x = subreg;
1786 code = GET_CODE (x);
1787 }
55a2c322 1788 }
f4eafc30 1789
145d4e1a 1790 if (REG_P (x) || SUBREG_P (x))
55a2c322 1791 {
1686923c 1792 x_hard_regno = get_hard_regno (x, true);
55a2c322
VM
1793 return (x_hard_regno >= 0
1794 && overlaps_hard_reg_set_p (set, mode, x_hard_regno));
1795 }
1796 if (MEM_P (x))
1797 {
277f65de 1798 struct address_info ad;
55a2c322 1799
277f65de
RS
1800 decompose_mem_address (&ad, x);
1801 if (ad.base_term != NULL && uses_hard_regs_p (*ad.base_term, set))
1802 return true;
1803 if (ad.index_term != NULL && uses_hard_regs_p (*ad.index_term, set))
1804 return true;
55a2c322
VM
1805 }
1806 fmt = GET_RTX_FORMAT (code);
1807 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1808 {
1809 if (fmt[i] == 'e')
1810 {
1811 if (uses_hard_regs_p (XEXP (x, i), set))
1812 return true;
1813 }
1814 else if (fmt[i] == 'E')
1815 {
1816 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1817 if (uses_hard_regs_p (XVECEXP (x, i, j), set))
1818 return true;
1819 }
1820 }
1821 return false;
1822}
1823
1824/* Return true if OP is a spilled pseudo. */
1825static inline bool
1826spilled_pseudo_p (rtx op)
1827{
1828 return (REG_P (op)
1829 && REGNO (op) >= FIRST_PSEUDO_REGISTER && in_mem_p (REGNO (op)));
1830}
1831
1832/* Return true if X is a general constant. */
1833static inline bool
1834general_constant_p (rtx x)
1835{
1836 return CONSTANT_P (x) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (x));
1837}
1838
2c62cbaa
VM
1839static bool
1840reg_in_class_p (rtx reg, enum reg_class cl)
1841{
1842 if (cl == NO_REGS)
1843 return get_reg_class (REGNO (reg)) == NO_REGS;
1844 return in_class_p (reg, cl, NULL);
1845}
1846
3c954213
VM
1847/* Return true if SET of RCLASS contains no hard regs which can be
1848 used in MODE. */
1849static bool
1850prohibited_class_reg_set_mode_p (enum reg_class rclass,
1851 HARD_REG_SET &set,
b8506a8a 1852 machine_mode mode)
3c954213
VM
1853{
1854 HARD_REG_SET temp;
1855
c07ad89a 1856 lra_assert (hard_reg_set_subset_p (reg_class_contents[rclass], set));
6576d245 1857 temp = set;
3c954213
VM
1858 AND_COMPL_HARD_REG_SET (temp, lra_no_alloc_regs);
1859 return (hard_reg_set_subset_p
1860 (temp, ira_prohibited_class_mode_regs[rclass][mode]));
1861}
1862
9b195552
VM
1863
1864/* Used to check validity info about small class input operands. It
1865 should be incremented at start of processing an insn
1866 alternative. */
1867static unsigned int curr_small_class_check = 0;
1868
a25f3e8e
RS
1869/* Update number of used inputs of class OP_CLASS for operand NOP
1870 of alternative NALT. Return true if we have more such class operands
1871 than the number of available regs. */
9b195552 1872static bool
a25f3e8e
RS
1873update_and_check_small_class_inputs (int nop, int nalt,
1874 enum reg_class op_class)
9b195552
VM
1875{
1876 static unsigned int small_class_check[LIM_REG_CLASSES];
1877 static int small_class_input_nums[LIM_REG_CLASSES];
1878
1879 if (SMALL_REGISTER_CLASS_P (op_class)
1880 /* We are interesting in classes became small because of fixing
1881 some hard regs, e.g. by an user through GCC options. */
1882 && hard_reg_set_intersect_p (reg_class_contents[op_class],
1883 ira_no_alloc_regs)
1884 && (curr_static_id->operand[nop].type != OP_OUT
a25f3e8e 1885 || TEST_BIT (curr_static_id->operand[nop].early_clobber_alts, nalt)))
9b195552
VM
1886 {
1887 if (small_class_check[op_class] == curr_small_class_check)
1888 small_class_input_nums[op_class]++;
1889 else
1890 {
1891 small_class_check[op_class] = curr_small_class_check;
1892 small_class_input_nums[op_class] = 1;
1893 }
1894 if (small_class_input_nums[op_class] > ira_class_hard_regs_num[op_class])
1895 return true;
1896 }
1897 return false;
1898}
1899
55a2c322
VM
1900/* Major function to choose the current insn alternative and what
1901 operands should be reloaded and how. If ONLY_ALTERNATIVE is not
1902 negative we should consider only this alternative. Return false if
67914693 1903 we cannot choose the alternative or find how to reload the
55a2c322
VM
1904 operands. */
1905static bool
1906process_alt_operands (int only_alternative)
1907{
1908 bool ok_p = false;
36ff9dfb 1909 int nop, overall, nalt;
55a2c322
VM
1910 int n_alternatives = curr_static_id->n_alternatives;
1911 int n_operands = curr_static_id->n_operands;
1912 /* LOSERS counts the operands that don't fit this alternative and
1913 would require loading. */
1914 int losers;
feca7b89 1915 int addr_losers;
55a2c322
VM
1916 /* REJECT is a count of how undesirable this alternative says it is
1917 if any reloading is required. If the alternative matches exactly
1918 then REJECT is ignored, but otherwise it gets this much counted
1919 against it in addition to the reloading needed. */
1920 int reject;
feca7b89
VM
1921 /* This is defined by '!' or '?' alternative constraint and added to
1922 reject. But in some cases it can be ignored. */
1923 int static_reject;
d1457701 1924 int op_reject;
55a2c322
VM
1925 /* The number of elements in the following array. */
1926 int early_clobbered_regs_num;
1927 /* Numbers of operands which are early clobber registers. */
1928 int early_clobbered_nops[MAX_RECOG_OPERANDS];
1929 enum reg_class curr_alt[MAX_RECOG_OPERANDS];
1930 HARD_REG_SET curr_alt_set[MAX_RECOG_OPERANDS];
1931 bool curr_alt_match_win[MAX_RECOG_OPERANDS];
1932 bool curr_alt_win[MAX_RECOG_OPERANDS];
1933 bool curr_alt_offmemok[MAX_RECOG_OPERANDS];
1934 int curr_alt_matches[MAX_RECOG_OPERANDS];
1935 /* The number of elements in the following array. */
1936 int curr_alt_dont_inherit_ops_num;
1937 /* Numbers of operands whose reload pseudos should not be inherited. */
1938 int curr_alt_dont_inherit_ops[MAX_RECOG_OPERANDS];
1939 rtx op;
1940 /* The register when the operand is a subreg of register, otherwise the
1941 operand itself. */
1942 rtx no_subreg_reg_operand[MAX_RECOG_OPERANDS];
1943 /* The register if the operand is a register or subreg of register,
1944 otherwise NULL. */
1945 rtx operand_reg[MAX_RECOG_OPERANDS];
1946 int hard_regno[MAX_RECOG_OPERANDS];
ef4bddc2 1947 machine_mode biggest_mode[MAX_RECOG_OPERANDS];
55a2c322
VM
1948 int reload_nregs, reload_sum;
1949 bool costly_p;
1950 enum reg_class cl;
1951
1952 /* Calculate some data common for all alternatives to speed up the
1953 function. */
1954 for (nop = 0; nop < n_operands; nop++)
1955 {
7214306b
VM
1956 rtx reg;
1957
55a2c322
VM
1958 op = no_subreg_reg_operand[nop] = *curr_id->operand_loc[nop];
1959 /* The real hard regno of the operand after the allocation. */
1686923c 1960 hard_regno[nop] = get_hard_regno (op, true);
f4eafc30 1961
7214306b
VM
1962 operand_reg[nop] = reg = op;
1963 biggest_mode[nop] = GET_MODE (op);
1964 if (GET_CODE (op) == SUBREG)
55a2c322 1965 {
bd5a2c67 1966 biggest_mode[nop] = wider_subreg_mode (op);
7214306b 1967 operand_reg[nop] = reg = SUBREG_REG (op);
55a2c322 1968 }
7214306b 1969 if (! REG_P (reg))
55a2c322 1970 operand_reg[nop] = NULL_RTX;
7214306b
VM
1971 else if (REGNO (reg) >= FIRST_PSEUDO_REGISTER
1972 || ((int) REGNO (reg)
1973 == lra_get_elimination_hard_regno (REGNO (reg))))
1974 no_subreg_reg_operand[nop] = reg;
1975 else
1976 operand_reg[nop] = no_subreg_reg_operand[nop]
1977 /* Just use natural mode for elimination result. It should
1978 be enough for extra constraints hooks. */
1979 = regno_reg_rtx[hard_regno[nop]];
55a2c322
VM
1980 }
1981
1982 /* The constraints are made of several alternatives. Each operand's
1983 constraint looks like foo,bar,... with commas separating the
1984 alternatives. The first alternatives for all operands go
1985 together, the second alternatives go together, etc.
1986
1987 First loop over alternatives. */
9840b2fa 1988 alternative_mask preferred = curr_id->preferred_alternatives;
4cc8d9d2 1989 if (only_alternative >= 0)
9840b2fa 1990 preferred &= ALTERNATIVE_BIT (only_alternative);
4cc8d9d2 1991
55a2c322
VM
1992 for (nalt = 0; nalt < n_alternatives; nalt++)
1993 {
1994 /* Loop over operands for one constraint alternative. */
9840b2fa 1995 if (!TEST_BIT (preferred, nalt))
55a2c322
VM
1996 continue;
1997
dbe7895c 1998 bool matching_early_clobber[MAX_RECOG_OPERANDS];
9b195552 1999 curr_small_class_check++;
feca7b89
VM
2000 overall = losers = addr_losers = 0;
2001 static_reject = reject = reload_nregs = reload_sum = 0;
55a2c322 2002 for (nop = 0; nop < n_operands; nop++)
cb1cca12
VM
2003 {
2004 int inc = (curr_static_id
2005 ->operand_alternative[nalt * n_operands + nop].reject);
2006 if (lra_dump_file != NULL && inc != 0)
2007 fprintf (lra_dump_file,
2008 " Staticly defined alt reject+=%d\n", inc);
feca7b89 2009 static_reject += inc;
dbe7895c 2010 matching_early_clobber[nop] = 0;
cb1cca12 2011 }
feca7b89 2012 reject += static_reject;
55a2c322
VM
2013 early_clobbered_regs_num = 0;
2014
2015 for (nop = 0; nop < n_operands; nop++)
2016 {
2017 const char *p;
2018 char *end;
2019 int len, c, m, i, opalt_num, this_alternative_matches;
2020 bool win, did_match, offmemok, early_clobber_p;
2021 /* false => this operand can be reloaded somehow for this
2022 alternative. */
2023 bool badop;
2024 /* true => this operand can be reloaded if the alternative
2025 allows regs. */
2026 bool winreg;
2027 /* True if a constant forced into memory would be OK for
2028 this operand. */
2029 bool constmemok;
2030 enum reg_class this_alternative, this_costly_alternative;
2031 HARD_REG_SET this_alternative_set, this_costly_alternative_set;
2032 bool this_alternative_match_win, this_alternative_win;
2033 bool this_alternative_offmemok;
80f466c4 2034 bool scratch_p;
ef4bddc2 2035 machine_mode mode;
777e635f 2036 enum constraint_num cn;
55a2c322
VM
2037
2038 opalt_num = nalt * n_operands + nop;
2039 if (curr_static_id->operand_alternative[opalt_num].anything_ok)
2040 {
2041 /* Fast track for no constraints at all. */
2042 curr_alt[nop] = NO_REGS;
2043 CLEAR_HARD_REG_SET (curr_alt_set[nop]);
2044 curr_alt_win[nop] = true;
2045 curr_alt_match_win[nop] = false;
2046 curr_alt_offmemok[nop] = false;
2047 curr_alt_matches[nop] = -1;
2048 continue;
2049 }
f4eafc30 2050
55a2c322
VM
2051 op = no_subreg_reg_operand[nop];
2052 mode = curr_operand_mode[nop];
2053
2054 win = did_match = winreg = offmemok = constmemok = false;
2055 badop = true;
f4eafc30 2056
55a2c322
VM
2057 early_clobber_p = false;
2058 p = curr_static_id->operand_alternative[opalt_num].constraint;
f4eafc30 2059
55a2c322
VM
2060 this_costly_alternative = this_alternative = NO_REGS;
2061 /* We update set of possible hard regs besides its class
2062 because reg class might be inaccurate. For example,
2063 union of LO_REGS (l), HI_REGS(h), and STACK_REG(k) in ARM
2064 is translated in HI_REGS because classes are merged by
2065 pairs and there is no accurate intermediate class. */
2066 CLEAR_HARD_REG_SET (this_alternative_set);
2067 CLEAR_HARD_REG_SET (this_costly_alternative_set);
2068 this_alternative_win = false;
2069 this_alternative_match_win = false;
2070 this_alternative_offmemok = false;
2071 this_alternative_matches = -1;
f4eafc30 2072
55a2c322
VM
2073 /* An empty constraint should be excluded by the fast
2074 track. */
2075 lra_assert (*p != 0 && *p != ',');
f4eafc30 2076
d1457701 2077 op_reject = 0;
55a2c322
VM
2078 /* Scan this alternative's specs for this operand; set WIN
2079 if the operand fits any letter in this alternative.
2080 Otherwise, clear BADOP if this operand could fit some
2081 letter after reloads, or set WINREG if this operand could
2082 fit after reloads provided the constraint allows some
2083 registers. */
2084 costly_p = false;
2085 do
2086 {
2087 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
2088 {
2089 case '\0':
2090 len = 0;
2091 break;
2092 case ',':
2093 c = '\0';
2094 break;
f4eafc30 2095
55a2c322
VM
2096 case '&':
2097 early_clobber_p = true;
2098 break;
f4eafc30 2099
d1457701
VM
2100 case '$':
2101 op_reject += LRA_MAX_REJECT;
2102 break;
2103 case '^':
2104 op_reject += LRA_LOSER_COST_FACTOR;
2105 break;
2106
55a2c322
VM
2107 case '#':
2108 /* Ignore rest of this alternative. */
2109 c = '\0';
2110 break;
f4eafc30 2111
55a2c322
VM
2112 case '0': case '1': case '2': case '3': case '4':
2113 case '5': case '6': case '7': case '8': case '9':
2114 {
2115 int m_hregno;
2116 bool match_p;
f4eafc30 2117
55a2c322
VM
2118 m = strtoul (p, &end, 10);
2119 p = end;
2120 len = 0;
2121 lra_assert (nop > m);
f4eafc30 2122
00224b1a
RS
2123 /* Reject matches if we don't know which operand is
2124 bigger. This situation would arguably be a bug in
2125 an .md pattern, but could also occur in a user asm. */
2126 if (!ordered_p (GET_MODE_SIZE (biggest_mode[m]),
2127 GET_MODE_SIZE (biggest_mode[nop])))
2128 break;
2129
a426543a
VM
2130 /* Don't match wrong asm insn operands for proper
2131 diagnostic later. */
2132 if (INSN_CODE (curr_insn) < 0
2133 && (curr_operand_mode[m] == BLKmode
2134 || curr_operand_mode[nop] == BLKmode)
2135 && curr_operand_mode[m] != curr_operand_mode[nop])
2136 break;
2137
1686923c 2138 m_hregno = get_hard_regno (*curr_id->operand_loc[m], false);
55a2c322
VM
2139 /* We are supposed to match a previous operand.
2140 If we do, we win if that one did. If we do
2141 not, count both of the operands as losers.
2142 (This is too conservative, since most of the
2143 time only a single reload insn will be needed
2144 to make the two operands win. As a result,
2145 this alternative may be rejected when it is
2146 actually desirable.) */
2147 match_p = false;
2148 if (operands_match_p (*curr_id->operand_loc[nop],
2149 *curr_id->operand_loc[m], m_hregno))
2150 {
2151 /* We should reject matching of an early
2152 clobber operand if the matching operand is
2153 not dying in the insn. */
a25f3e8e
RS
2154 if (!TEST_BIT (curr_static_id->operand[m]
2155 .early_clobber_alts, nalt)
55a2c322
VM
2156 || operand_reg[nop] == NULL_RTX
2157 || (find_regno_note (curr_insn, REG_DEAD,
1c86bd80
VM
2158 REGNO (op))
2159 || REGNO (op) == REGNO (operand_reg[m])))
55a2c322
VM
2160 match_p = true;
2161 }
2162 if (match_p)
2163 {
2164 /* If we are matching a non-offsettable
2165 address where an offsettable address was
2166 expected, then we must reject this
2167 combination, because we can't reload
2168 it. */
2169 if (curr_alt_offmemok[m]
2170 && MEM_P (*curr_id->operand_loc[m])
2171 && curr_alt[m] == NO_REGS && ! curr_alt_win[m])
2172 continue;
55a2c322
VM
2173 }
2174 else
2175 {
9f41de41
VM
2176 /* If the operands do not match and one
2177 operand is INOUT, we can not match them.
2178 Try other possibilities, e.g. other
2179 alternatives or commutative operand
2180 exchange. */
2181 if (curr_static_id->operand[nop].type == OP_INOUT
2182 || curr_static_id->operand[m].type == OP_INOUT)
2183 break;
2f0b80c7 2184 /* Operands don't match. If the operands are
613caed2
VM
2185 different user defined explicit hard
2186 registers, then we cannot make them match
2187 when one is early clobber operand. */
2f0b80c7
PB
2188 if ((REG_P (*curr_id->operand_loc[nop])
2189 || SUBREG_P (*curr_id->operand_loc[nop]))
2190 && (REG_P (*curr_id->operand_loc[m])
2191 || SUBREG_P (*curr_id->operand_loc[m])))
2192 {
2193 rtx nop_reg = *curr_id->operand_loc[nop];
2194 if (SUBREG_P (nop_reg))
2195 nop_reg = SUBREG_REG (nop_reg);
2196 rtx m_reg = *curr_id->operand_loc[m];
2197 if (SUBREG_P (m_reg))
2198 m_reg = SUBREG_REG (m_reg);
2199
2200 if (REG_P (nop_reg)
2201 && HARD_REGISTER_P (nop_reg)
2202 && REG_USERVAR_P (nop_reg)
2203 && REG_P (m_reg)
2204 && HARD_REGISTER_P (m_reg)
2205 && REG_USERVAR_P (m_reg))
613caed2
VM
2206 {
2207 int i;
2208
2209 for (i = 0; i < early_clobbered_regs_num; i++)
2210 if (m == early_clobbered_nops[i])
2211 break;
2212 if (i < early_clobbered_regs_num
2213 || early_clobber_p)
2214 break;
2215 }
2f0b80c7 2216 }
2f0b80c7
PB
2217 /* Both operands must allow a reload register,
2218 otherwise we cannot make them match. */
55a2c322
VM
2219 if (curr_alt[m] == NO_REGS)
2220 break;
2221 /* Retroactively mark the operand we had to
2222 match as a loser, if it wasn't already and
2223 it wasn't matched to a register constraint
2224 (e.g it might be matched by memory). */
2225 if (curr_alt_win[m]
2226 && (operand_reg[m] == NULL_RTX
2227 || hard_regno[m] < 0))
2228 {
2229 losers++;
2230 reload_nregs
2231 += (ira_reg_class_max_nregs[curr_alt[m]]
2232 [GET_MODE (*curr_id->operand_loc[m])]);
2233 }
f4eafc30 2234
f4581282
VM
2235 /* Prefer matching earlyclobber alternative as
2236 it results in less hard regs required for
2237 the insn than a non-matching earlyclobber
2238 alternative. */
a25f3e8e
RS
2239 if (TEST_BIT (curr_static_id->operand[m]
2240 .early_clobber_alts, nalt))
f4581282
VM
2241 {
2242 if (lra_dump_file != NULL)
2243 fprintf
2244 (lra_dump_file,
2245 " %d Matching earlyclobber alt:"
2246 " reject--\n",
2247 nop);
dbe7895c
AS
2248 if (!matching_early_clobber[m])
2249 {
2250 reject--;
2251 matching_early_clobber[m] = 1;
2252 }
f4581282
VM
2253 }
2254 /* Otherwise we prefer no matching
2255 alternatives because it gives more freedom
2256 in RA. */
2257 else if (operand_reg[nop] == NULL_RTX
2258 || (find_regno_note (curr_insn, REG_DEAD,
2259 REGNO (operand_reg[nop]))
2260 == NULL_RTX))
cb1cca12
VM
2261 {
2262 if (lra_dump_file != NULL)
2263 fprintf
2264 (lra_dump_file,
2265 " %d Matching alt: reject+=2\n",
2266 nop);
2267 reject += 2;
2268 }
55a2c322
VM
2269 }
2270 /* If we have to reload this operand and some
2271 previous operand also had to match the same
2272 thing as this operand, we don't know how to do
2273 that. */
2274 if (!match_p || !curr_alt_win[m])
2275 {
2276 for (i = 0; i < nop; i++)
2277 if (curr_alt_matches[i] == m)
2278 break;
2279 if (i < nop)
2280 break;
2281 }
2282 else
2283 did_match = true;
f4eafc30 2284
28ed1460 2285 this_alternative_matches = m;
55a2c322
VM
2286 /* This can be fixed with reloads if the operand
2287 we are supposed to match can be fixed with
2288 reloads. */
2289 badop = false;
2290 this_alternative = curr_alt[m];
6576d245 2291 this_alternative_set = curr_alt_set[m];
821b7577 2292 winreg = this_alternative != NO_REGS;
55a2c322
VM
2293 break;
2294 }
f4eafc30 2295
55a2c322
VM
2296 case 'g':
2297 if (MEM_P (op)
2298 || general_constant_p (op)
2299 || spilled_pseudo_p (op))
2300 win = true;
777e635f 2301 cl = GENERAL_REGS;
55a2c322 2302 goto reg;
f4eafc30 2303
55a2c322 2304 default:
777e635f
RS
2305 cn = lookup_constraint (p);
2306 switch (get_constraint_type (cn))
55a2c322 2307 {
777e635f
RS
2308 case CT_REGISTER:
2309 cl = reg_class_for_constraint (cn);
2310 if (cl != NO_REGS)
2311 goto reg;
2312 break;
f4eafc30 2313
d9c35eee
RS
2314 case CT_CONST_INT:
2315 if (CONST_INT_P (op)
2316 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2317 win = true;
2318 break;
2319
777e635f
RS
2320 case CT_MEMORY:
2321 if (MEM_P (op)
2322 && satisfies_memory_constraint_p (op, cn))
2323 win = true;
2324 else if (spilled_pseudo_p (op))
2325 win = true;
2326
2327 /* If we didn't already win, we can reload constants
2328 via force_const_mem or put the pseudo value into
2329 memory, or make other memory by reloading the
2330 address like for 'o'. */
2331 if (CONST_POOL_OK_P (mode, op)
987b67f1
VM
2332 || MEM_P (op) || REG_P (op)
2333 /* We can restore the equiv insn by a
2334 reload. */
2335 || equiv_substition_p[nop])
777e635f
RS
2336 badop = false;
2337 constmemok = true;
2338 offmemok = true;
2339 break;
2340
2341 case CT_ADDRESS:
998fd141
AO
2342 /* An asm operand with an address constraint
2343 that doesn't satisfy address_operand has
2344 is_address cleared, so that we don't try to
2345 make a non-address fit. */
2346 if (!curr_static_id->operand[nop].is_address)
2347 break;
777e635f
RS
2348 /* If we didn't already win, we can reload the address
2349 into a base register. */
2350 if (satisfies_address_constraint_p (op, cn))
2351 win = true;
2352 cl = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2353 ADDRESS, SCRATCH);
2354 badop = false;
2355 goto reg;
2356
2357 case CT_FIXED_FORM:
2358 if (constraint_satisfied_p (op, cn))
55a2c322 2359 win = true;
55a2c322 2360 break;
9eb1ca69
VM
2361
2362 case CT_SPECIAL_MEMORY:
2363 if (MEM_P (op)
2364 && satisfies_memory_constraint_p (op, cn))
2365 win = true;
2366 else if (spilled_pseudo_p (op))
2367 win = true;
2368 break;
55a2c322 2369 }
777e635f 2370 break;
f4eafc30 2371
777e635f 2372 reg:
a5b821e4
JJ
2373 if (mode == BLKmode)
2374 break;
55a2c322
VM
2375 this_alternative = reg_class_subunion[this_alternative][cl];
2376 IOR_HARD_REG_SET (this_alternative_set,
2377 reg_class_contents[cl]);
2378 if (costly_p)
2379 {
2380 this_costly_alternative
2381 = reg_class_subunion[this_costly_alternative][cl];
2382 IOR_HARD_REG_SET (this_costly_alternative_set,
2383 reg_class_contents[cl]);
2384 }
55a2c322
VM
2385 winreg = true;
2386 if (REG_P (op))
2387 {
2388 if (hard_regno[nop] >= 0
2389 && in_hard_reg_set_p (this_alternative_set,
2390 mode, hard_regno[nop]))
2391 win = true;
2392 else if (hard_regno[nop] < 0
2393 && in_class_p (op, this_alternative, NULL))
2394 win = true;
2395 }
2396 break;
2397 }
2398 if (c != ' ' && c != '\t')
2399 costly_p = c == '*';
2400 }
2401 while ((p += len), c);
f4eafc30 2402
80f466c4
VM
2403 scratch_p = (operand_reg[nop] != NULL_RTX
2404 && lra_former_scratch_p (REGNO (operand_reg[nop])));
55a2c322
VM
2405 /* Record which operands fit this alternative. */
2406 if (win)
2407 {
2408 this_alternative_win = true;
2409 if (operand_reg[nop] != NULL_RTX)
2410 {
2411 if (hard_regno[nop] >= 0)
2412 {
2413 if (in_hard_reg_set_p (this_costly_alternative_set,
2414 mode, hard_regno[nop]))
cb1cca12
VM
2415 {
2416 if (lra_dump_file != NULL)
2417 fprintf (lra_dump_file,
2418 " %d Costly set: reject++\n",
2419 nop);
2420 reject++;
2421 }
55a2c322
VM
2422 }
2423 else
2424 {
80f466c4
VM
2425 /* Prefer won reg to spilled pseudo under other
2426 equal conditions for possibe inheritance. */
2427 if (! scratch_p)
2428 {
2429 if (lra_dump_file != NULL)
2430 fprintf
2431 (lra_dump_file,
2432 " %d Non pseudo reload: reject++\n",
2433 nop);
2434 reject++;
2435 }
55a2c322
VM
2436 if (in_class_p (operand_reg[nop],
2437 this_costly_alternative, NULL))
cb1cca12
VM
2438 {
2439 if (lra_dump_file != NULL)
2440 fprintf
2441 (lra_dump_file,
2442 " %d Non pseudo costly reload:"
2443 " reject++\n",
2444 nop);
2445 reject++;
2446 }
55a2c322 2447 }
9c582551 2448 /* We simulate the behavior of old reload here.
55a2c322
VM
2449 Although scratches need hard registers and it
2450 might result in spilling other pseudos, no reload
2451 insns are generated for the scratches. So it
2452 might cost something but probably less than old
2453 reload pass believes. */
80f466c4 2454 if (scratch_p)
cb1cca12
VM
2455 {
2456 if (lra_dump_file != NULL)
2457 fprintf (lra_dump_file,
80f466c4 2458 " %d Scratch win: reject+=2\n",
cb1cca12 2459 nop);
80f466c4 2460 reject += 2;
cb1cca12 2461 }
55a2c322
VM
2462 }
2463 }
2464 else if (did_match)
2465 this_alternative_match_win = true;
2466 else
2467 {
2468 int const_to_mem = 0;
2469 bool no_regs_p;
2470
d1457701 2471 reject += op_reject;
8d49e7ef
VM
2472 /* Never do output reload of stack pointer. It makes
2473 impossible to do elimination when SP is changed in
2474 RTL. */
2475 if (op == stack_pointer_rtx && ! frame_pointer_needed
2476 && curr_static_id->operand[nop].type != OP_IN)
2477 goto fail;
2478
e86c0101
SB
2479 /* If this alternative asks for a specific reg class, see if there
2480 is at least one allocatable register in that class. */
55a2c322
VM
2481 no_regs_p
2482 = (this_alternative == NO_REGS
2483 || (hard_reg_set_subset_p
2484 (reg_class_contents[this_alternative],
2485 lra_no_alloc_regs)));
e86c0101
SB
2486
2487 /* For asms, verify that the class for this alternative is possible
2488 for the mode that is specified. */
ecee672b 2489 if (!no_regs_p && INSN_CODE (curr_insn) < 0)
e86c0101
SB
2490 {
2491 int i;
2492 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
f939c3e6 2493 if (targetm.hard_regno_mode_ok (i, mode)
8f21260c
VM
2494 && in_hard_reg_set_p (reg_class_contents[this_alternative],
2495 mode, i))
e86c0101
SB
2496 break;
2497 if (i == FIRST_PSEUDO_REGISTER)
2498 winreg = false;
2499 }
2500
55a2c322
VM
2501 /* If this operand accepts a register, and if the
2502 register class has at least one allocatable register,
2503 then this operand can be reloaded. */
2504 if (winreg && !no_regs_p)
2505 badop = false;
f4eafc30 2506
55a2c322 2507 if (badop)
8f21260c
VM
2508 {
2509 if (lra_dump_file != NULL)
2510 fprintf (lra_dump_file,
2511 " alt=%d: Bad operand -- refuse\n",
2512 nalt);
2513 goto fail;
2514 }
55a2c322 2515
d13835b6
VM
2516 if (this_alternative != NO_REGS)
2517 {
2518 HARD_REG_SET available_regs;
2519
6576d245 2520 available_regs = reg_class_contents[this_alternative];
d13835b6
VM
2521 AND_COMPL_HARD_REG_SET
2522 (available_regs,
2523 ira_prohibited_class_mode_regs[this_alternative][mode]);
2524 AND_COMPL_HARD_REG_SET (available_regs, lra_no_alloc_regs);
2525 if (hard_reg_set_empty_p (available_regs))
2526 {
2527 /* There are no hard regs holding a value of given
2528 mode. */
2529 if (offmemok)
2530 {
2531 this_alternative = NO_REGS;
2532 if (lra_dump_file != NULL)
2533 fprintf (lra_dump_file,
2534 " %d Using memory because of"
2535 " a bad mode: reject+=2\n",
2536 nop);
2537 reject += 2;
2538 }
2539 else
2540 {
2541 if (lra_dump_file != NULL)
2542 fprintf (lra_dump_file,
2543 " alt=%d: Wrong mode -- refuse\n",
2544 nalt);
2545 goto fail;
2546 }
2547 }
2548 }
2549
2ae577fd
VM
2550 /* If not assigned pseudo has a class which a subset of
2551 required reg class, it is a less costly alternative
2552 as the pseudo still can get a hard reg of necessary
2553 class. */
2554 if (! no_regs_p && REG_P (op) && hard_regno[nop] < 0
2555 && (cl = get_reg_class (REGNO (op))) != NO_REGS
2556 && ira_class_subset_p[this_alternative][cl])
2557 {
2558 if (lra_dump_file != NULL)
2559 fprintf
2560 (lra_dump_file,
2561 " %d Super set class reg: reject-=3\n", nop);
2562 reject -= 3;
2563 }
2564
55a2c322
VM
2565 this_alternative_offmemok = offmemok;
2566 if (this_costly_alternative != NO_REGS)
cb1cca12
VM
2567 {
2568 if (lra_dump_file != NULL)
2569 fprintf (lra_dump_file,
2570 " %d Costly loser: reject++\n", nop);
2571 reject++;
2572 }
55a2c322
VM
2573 /* If the operand is dying, has a matching constraint,
2574 and satisfies constraints of the matched operand
f4581282 2575 which failed to satisfy the own constraints, most probably
a9711f36
VM
2576 the reload for this operand will be gone. */
2577 if (this_alternative_matches >= 0
2578 && !curr_alt_win[this_alternative_matches]
2579 && REG_P (op)
2580 && find_regno_note (curr_insn, REG_DEAD, REGNO (op))
2581 && (hard_regno[nop] >= 0
2582 ? in_hard_reg_set_p (this_alternative_set,
2583 mode, hard_regno[nop])
2584 : in_class_p (op, this_alternative, NULL)))
2585 {
2586 if (lra_dump_file != NULL)
2587 fprintf
2588 (lra_dump_file,
2589 " %d Dying matched operand reload: reject++\n",
2590 nop);
2591 reject++;
2592 }
2593 else
027ece11 2594 {
5306401f
VM
2595 /* Strict_low_part requires to reload the register
2596 not the sub-register. In this case we should
2597 check that a final reload hard reg can hold the
2598 value mode. */
027ece11
VM
2599 if (curr_static_id->operand[nop].strict_low
2600 && REG_P (op)
2601 && hard_regno[nop] < 0
2602 && GET_CODE (*curr_id->operand_loc[nop]) == SUBREG
2603 && ira_class_hard_regs_num[this_alternative] > 0
f939c3e6
RS
2604 && (!targetm.hard_regno_mode_ok
2605 (ira_class_hard_regs[this_alternative][0],
2606 GET_MODE (*curr_id->operand_loc[nop]))))
8f21260c
VM
2607 {
2608 if (lra_dump_file != NULL)
2609 fprintf
2610 (lra_dump_file,
2611 " alt=%d: Strict low subreg reload -- refuse\n",
2612 nalt);
2613 goto fail;
2614 }
027ece11
VM
2615 losers++;
2616 }
55a2c322
VM
2617 if (operand_reg[nop] != NULL_RTX
2618 /* Output operands and matched input operands are
2619 not inherited. The following conditions do not
2620 exactly describe the previous statement but they
2621 are pretty close. */
2622 && curr_static_id->operand[nop].type != OP_OUT
2623 && (this_alternative_matches < 0
2624 || curr_static_id->operand[nop].type != OP_IN))
2625 {
2626 int last_reload = (lra_reg_info[ORIGINAL_REGNO
2627 (operand_reg[nop])]
2628 .last_reload);
2629
6334f3e9
VM
2630 /* The value of reload_sum has sense only if we
2631 process insns in their order. It happens only on
2632 the first constraints sub-pass when we do most of
2633 reload work. */
2634 if (lra_constraint_iter == 1 && last_reload > bb_reload_num)
55a2c322
VM
2635 reload_sum += last_reload - bb_reload_num;
2636 }
2637 /* If this is a constant that is reloaded into the
2638 desired class by copying it to memory first, count
2639 that as another reload. This is consistent with
2640 other code and is required to avoid choosing another
2641 alternative when the constant is moved into memory.
2642 Note that the test here is precisely the same as in
2643 the code below that calls force_const_mem. */
2644 if (CONST_POOL_OK_P (mode, op)
2645 && ((targetm.preferred_reload_class
2646 (op, this_alternative) == NO_REGS)
2647 || no_input_reloads_p))
2648 {
2649 const_to_mem = 1;
2650 if (! no_regs_p)
2651 losers++;
2652 }
f4eafc30 2653
55a2c322
VM
2654 /* Alternative loses if it requires a type of reload not
2655 permitted for this insn. We can always reload
2656 objects with a REG_UNUSED note. */
2657 if ((curr_static_id->operand[nop].type != OP_IN
2658 && no_output_reloads_p
2659 && ! find_reg_note (curr_insn, REG_UNUSED, op))
2660 || (curr_static_id->operand[nop].type != OP_OUT
8f21260c
VM
2661 && no_input_reloads_p && ! const_to_mem)
2662 || (this_alternative_matches >= 0
9102dadd
VM
2663 && (no_input_reloads_p
2664 || (no_output_reloads_p
2665 && (curr_static_id->operand
2666 [this_alternative_matches].type != OP_IN)
2667 && ! find_reg_note (curr_insn, REG_UNUSED,
2668 no_subreg_reg_operand
2669 [this_alternative_matches])))))
8f21260c
VM
2670 {
2671 if (lra_dump_file != NULL)
2672 fprintf
2673 (lra_dump_file,
2674 " alt=%d: No input/otput reload -- refuse\n",
2675 nalt);
2676 goto fail;
2677 }
f4eafc30 2678
67914693 2679 /* Alternative loses if it required class pseudo cannot
f66af4aa 2680 hold value of required mode. Such insns can be
7b6e0c54 2681 described by insn definitions with mode iterators. */
f66af4aa
VM
2682 if (GET_MODE (*curr_id->operand_loc[nop]) != VOIDmode
2683 && ! hard_reg_set_empty_p (this_alternative_set)
7b6e0c54
VM
2684 /* It is common practice for constraints to use a
2685 class which does not have actually enough regs to
2686 hold the value (e.g. x86 AREG for mode requiring
2687 more one general reg). Therefore we have 2
155ed511
SL
2688 conditions to check that the reload pseudo cannot
2689 hold the mode value. */
f939c3e6
RS
2690 && (!targetm.hard_regno_mode_ok
2691 (ira_class_hard_regs[this_alternative][0],
2692 GET_MODE (*curr_id->operand_loc[nop])))
7b6e0c54
VM
2693 /* The above condition is not enough as the first
2694 reg in ira_class_hard_regs can be not aligned for
2695 multi-words mode values. */
3c954213
VM
2696 && (prohibited_class_reg_set_mode_p
2697 (this_alternative, this_alternative_set,
2698 GET_MODE (*curr_id->operand_loc[nop]))))
2699 {
2700 if (lra_dump_file != NULL)
2701 fprintf (lra_dump_file,
2702 " alt=%d: reload pseudo for op %d "
0d7bac69 2703 "cannot hold the mode value -- refuse\n",
3c954213
VM
2704 nalt, nop);
2705 goto fail;
f66af4aa
VM
2706 }
2707
821b7577
VM
2708 /* Check strong discouragement of reload of non-constant
2709 into class THIS_ALTERNATIVE. */
2710 if (! CONSTANT_P (op) && ! no_regs_p
2711 && (targetm.preferred_reload_class
2712 (op, this_alternative) == NO_REGS
2713 || (curr_static_id->operand[nop].type == OP_OUT
2714 && (targetm.preferred_output_reload_class
2715 (op, this_alternative) == NO_REGS))))
cb1cca12
VM
2716 {
2717 if (lra_dump_file != NULL)
2718 fprintf (lra_dump_file,
2719 " %d Non-prefered reload: reject+=%d\n",
2720 nop, LRA_MAX_REJECT);
2721 reject += LRA_MAX_REJECT;
2722 }
f4eafc30 2723
ed52a84e
VM
2724 if (! (MEM_P (op) && offmemok)
2725 && ! (const_to_mem && constmemok))
55a2c322
VM
2726 {
2727 /* We prefer to reload pseudos over reloading other
2728 things, since such reloads may be able to be
2729 eliminated later. So bump REJECT in other cases.
2730 Don't do this in the case where we are forcing a
2731 constant into memory and it will then win since
2732 we don't want to have a different alternative
2733 match then. */
2734 if (! (REG_P (op) && REGNO (op) >= FIRST_PSEUDO_REGISTER))
cb1cca12
VM
2735 {
2736 if (lra_dump_file != NULL)
2737 fprintf
2738 (lra_dump_file,
2739 " %d Non-pseudo reload: reject+=2\n",
2740 nop);
2741 reject += 2;
2742 }
f4eafc30 2743
55a2c322
VM
2744 if (! no_regs_p)
2745 reload_nregs
2746 += ira_reg_class_max_nregs[this_alternative][mode];
36ff9dfb
VM
2747
2748 if (SMALL_REGISTER_CLASS_P (this_alternative))
cb1cca12
VM
2749 {
2750 if (lra_dump_file != NULL)
2751 fprintf
2752 (lra_dump_file,
2753 " %d Small class reload: reject+=%d\n",
2754 nop, LRA_LOSER_COST_FACTOR / 2);
2755 reject += LRA_LOSER_COST_FACTOR / 2;
2756 }
55a2c322
VM
2757 }
2758
1bdc4b11
VM
2759 /* We are trying to spill pseudo into memory. It is
2760 usually more costly than moving to a hard register
2761 although it might takes the same number of
5f225ef4
VM
2762 reloads.
2763
2764 Non-pseudo spill may happen also. Suppose a target allows both
2765 register and memory in the operand constraint alternatives,
2766 then it's typical that an eliminable register has a substition
2767 of "base + offset" which can either be reloaded by a simple
2768 "new_reg <= base + offset" which will match the register
2769 constraint, or a similar reg addition followed by further spill
2770 to and reload from memory which will match the memory
2771 constraint, but this memory spill will be much more costly
2772 usually.
2773
2774 Code below increases the reject for both pseudo and non-pseudo
2775 spill. */
10406801
JW
2776 if (no_regs_p
2777 && !(MEM_P (op) && offmemok)
2778 && !(REG_P (op) && hard_regno[nop] < 0))
cb1cca12
VM
2779 {
2780 if (lra_dump_file != NULL)
2781 fprintf
2782 (lra_dump_file,
5f225ef4
VM
2783 " %d Spill %spseudo into memory: reject+=3\n",
2784 nop, REG_P (op) ? "" : "Non-");
cb1cca12 2785 reject += 3;
7891065a
VM
2786 if (VECTOR_MODE_P (mode))
2787 {
2788 /* Spilling vectors into memory is usually more
2789 costly as they contain big values. */
2790 if (lra_dump_file != NULL)
2791 fprintf
2792 (lra_dump_file,
2793 " %d Spill vector pseudo: reject+=2\n",
2794 nop);
2795 reject += 2;
2796 }
cb1cca12 2797 }
1bdc4b11 2798
4796d8f6
VM
2799 /* When we use an operand requiring memory in given
2800 alternative, the insn should write *and* read the
2801 value to/from memory it is costly in comparison with
2802 an insn alternative which does not use memory
2803 (e.g. register or immediate operand). We exclude
2804 memory operand for such case as we can satisfy the
2805 memory constraints by reloading address. */
2806 if (no_regs_p && offmemok && !MEM_P (op))
9b195552
VM
2807 {
2808 if (lra_dump_file != NULL)
2809 fprintf
2810 (lra_dump_file,
2811 " Using memory insn operand %d: reject+=3\n",
2812 nop);
2813 reject += 3;
2814 }
2815
7100b561
UB
2816 /* If reload requires moving value through secondary
2817 memory, it will need one more insn at least. */
2818 if (this_alternative != NO_REGS
2819 && REG_P (op) && (cl = get_reg_class (REGNO (op))) != NO_REGS
2820 && ((curr_static_id->operand[nop].type != OP_OUT
f15643d4
RS
2821 && targetm.secondary_memory_needed (GET_MODE (op), cl,
2822 this_alternative))
7100b561 2823 || (curr_static_id->operand[nop].type != OP_IN
f15643d4
RS
2824 && (targetm.secondary_memory_needed
2825 (GET_MODE (op), this_alternative, cl)))))
7100b561 2826 losers++;
f15643d4 2827
feca7b89
VM
2828 if (MEM_P (op) && offmemok)
2829 addr_losers++;
82396b8c 2830 else
8b8e41e5 2831 {
82396b8c
VM
2832 /* Input reloads can be inherited more often than
2833 output reloads can be removed, so penalize output
2834 reloads. */
2835 if (!REG_P (op) || curr_static_id->operand[nop].type != OP_IN)
2836 {
2837 if (lra_dump_file != NULL)
2838 fprintf
2839 (lra_dump_file,
2840 " %d Non input pseudo reload: reject++\n",
2841 nop);
2842 reject++;
2843 }
2844
2845 if (curr_static_id->operand[nop].type == OP_INOUT)
2846 {
2847 if (lra_dump_file != NULL)
2848 fprintf
2849 (lra_dump_file,
2850 " %d Input/Output reload: reject+=%d\n",
2851 nop, LRA_LOSER_COST_FACTOR);
2852 reject += LRA_LOSER_COST_FACTOR;
2853 }
8b8e41e5 2854 }
55a2c322 2855 }
f4eafc30 2856
80f466c4 2857 if (early_clobber_p && ! scratch_p)
cb1cca12
VM
2858 {
2859 if (lra_dump_file != NULL)
2860 fprintf (lra_dump_file,
2861 " %d Early clobber: reject++\n", nop);
2862 reject++;
2863 }
55a2c322
VM
2864 /* ??? We check early clobbers after processing all operands
2865 (see loop below) and there we update the costs more.
2866 Should we update the cost (may be approximately) here
2867 because of early clobber register reloads or it is a rare
2868 or non-important thing to be worth to do it. */
feca7b89
VM
2869 overall = (losers * LRA_LOSER_COST_FACTOR + reject
2870 - (addr_losers == losers ? static_reject : 0));
55a2c322 2871 if ((best_losers == 0 || losers != 0) && best_overall < overall)
deca73f5
VM
2872 {
2873 if (lra_dump_file != NULL)
2874 fprintf (lra_dump_file,
cb1cca12 2875 " alt=%d,overall=%d,losers=%d -- refuse\n",
deca73f5
VM
2876 nalt, overall, losers);
2877 goto fail;
2878 }
55a2c322 2879
a25f3e8e
RS
2880 if (update_and_check_small_class_inputs (nop, nalt,
2881 this_alternative))
9b195552
VM
2882 {
2883 if (lra_dump_file != NULL)
2884 fprintf (lra_dump_file,
2885 " alt=%d, not enough small class regs -- refuse\n",
2886 nalt);
2887 goto fail;
2888 }
55a2c322 2889 curr_alt[nop] = this_alternative;
6576d245 2890 curr_alt_set[nop] = this_alternative_set;
55a2c322
VM
2891 curr_alt_win[nop] = this_alternative_win;
2892 curr_alt_match_win[nop] = this_alternative_match_win;
2893 curr_alt_offmemok[nop] = this_alternative_offmemok;
2894 curr_alt_matches[nop] = this_alternative_matches;
f4eafc30 2895
55a2c322
VM
2896 if (this_alternative_matches >= 0
2897 && !did_match && !this_alternative_win)
2898 curr_alt_win[this_alternative_matches] = false;
f4eafc30 2899
55a2c322
VM
2900 if (early_clobber_p && operand_reg[nop] != NULL_RTX)
2901 early_clobbered_nops[early_clobbered_regs_num++] = nop;
2902 }
feca7b89 2903
2c62cbaa
VM
2904 if (curr_insn_set != NULL_RTX && n_operands == 2
2905 /* Prevent processing non-move insns. */
2906 && (GET_CODE (SET_SRC (curr_insn_set)) == SUBREG
2907 || SET_SRC (curr_insn_set) == no_subreg_reg_operand[1])
2908 && ((! curr_alt_win[0] && ! curr_alt_win[1]
2909 && REG_P (no_subreg_reg_operand[0])
2910 && REG_P (no_subreg_reg_operand[1])
2911 && (reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2912 || reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0])))
2913 || (! curr_alt_win[0] && curr_alt_win[1]
2914 && REG_P (no_subreg_reg_operand[1])
feca7b89
VM
2915 /* Check that we reload memory not the memory
2916 address. */
9125b9fc
VM
2917 && ! (curr_alt_offmemok[0]
2918 && MEM_P (no_subreg_reg_operand[0]))
2c62cbaa
VM
2919 && reg_in_class_p (no_subreg_reg_operand[1], curr_alt[0]))
2920 || (curr_alt_win[0] && ! curr_alt_win[1]
2921 && REG_P (no_subreg_reg_operand[0])
feca7b89
VM
2922 /* Check that we reload memory not the memory
2923 address. */
9125b9fc
VM
2924 && ! (curr_alt_offmemok[1]
2925 && MEM_P (no_subreg_reg_operand[1]))
2c62cbaa
VM
2926 && reg_in_class_p (no_subreg_reg_operand[0], curr_alt[1])
2927 && (! CONST_POOL_OK_P (curr_operand_mode[1],
2928 no_subreg_reg_operand[1])
2929 || (targetm.preferred_reload_class
2930 (no_subreg_reg_operand[1],
2931 (enum reg_class) curr_alt[1]) != NO_REGS))
2932 /* If it is a result of recent elimination in move
2933 insn we can transform it into an add still by
2934 using this alternative. */
b4c96972
RS
2935 && GET_CODE (no_subreg_reg_operand[1]) != PLUS
2936 /* Likewise if the source has been replaced with an
2937 equivalent value. This only happens once -- the reload
2938 will use the equivalent value instead of the register it
2939 replaces -- so there should be no danger of cycling. */
2940 && !equiv_substition_p[1])))
cb1cca12
VM
2941 {
2942 /* We have a move insn and a new reload insn will be similar
9125b9fc
VM
2943 to the current insn. We should avoid such situation as
2944 it results in LRA cycling. */
2945 if (lra_dump_file != NULL)
2946 fprintf (lra_dump_file,
2947 " Cycle danger: overall += LRA_MAX_REJECT\n");
cb1cca12
VM
2948 overall += LRA_MAX_REJECT;
2949 }
55a2c322
VM
2950 ok_p = true;
2951 curr_alt_dont_inherit_ops_num = 0;
2952 for (nop = 0; nop < early_clobbered_regs_num; nop++)
2953 {
2194f7a2 2954 int i, j, clobbered_hard_regno, first_conflict_j, last_conflict_j;
55a2c322
VM
2955 HARD_REG_SET temp_set;
2956
2957 i = early_clobbered_nops[nop];
2958 if ((! curr_alt_win[i] && ! curr_alt_match_win[i])
2959 || hard_regno[i] < 0)
2960 continue;
1c86bd80 2961 lra_assert (operand_reg[i] != NULL_RTX);
55a2c322
VM
2962 clobbered_hard_regno = hard_regno[i];
2963 CLEAR_HARD_REG_SET (temp_set);
2964 add_to_hard_reg_set (&temp_set, biggest_mode[i], clobbered_hard_regno);
2194f7a2 2965 first_conflict_j = last_conflict_j = -1;
55a2c322
VM
2966 for (j = 0; j < n_operands; j++)
2967 if (j == i
2968 /* We don't want process insides of match_operator and
2969 match_parallel because otherwise we would process
2970 their operands once again generating a wrong
2971 code. */
2972 || curr_static_id->operand[j].is_operator)
2973 continue;
2974 else if ((curr_alt_matches[j] == i && curr_alt_match_win[j])
2975 || (curr_alt_matches[i] == j && curr_alt_match_win[i]))
2976 continue;
1c86bd80
VM
2977 /* If we don't reload j-th operand, check conflicts. */
2978 else if ((curr_alt_win[j] || curr_alt_match_win[j])
2979 && uses_hard_regs_p (*curr_id->operand_loc[j], temp_set))
2194f7a2
VM
2980 {
2981 if (first_conflict_j < 0)
2982 first_conflict_j = j;
2983 last_conflict_j = j;
2f0b80c7
PB
2984 /* Both the earlyclobber operand and conflicting operand
2985 cannot both be user defined hard registers. */
2986 if (HARD_REGISTER_P (operand_reg[i])
2987 && REG_USERVAR_P (operand_reg[i])
2988 && operand_reg[j] != NULL_RTX
2989 && HARD_REGISTER_P (operand_reg[j])
2990 && REG_USERVAR_P (operand_reg[j]))
2991 fatal_insn ("unable to generate reloads for "
2992 "impossible constraints:", curr_insn);
2194f7a2
VM
2993 }
2994 if (last_conflict_j < 0)
55a2c322 2995 continue;
2f0b80c7
PB
2996
2997 /* If an earlyclobber operand conflicts with another non-matching
2998 operand (ie, they have been assigned the same hard register),
2999 then it is better to reload the other operand, as there may
3000 exist yet another operand with a matching constraint associated
3001 with the earlyclobber operand. However, if one of the operands
3002 is an explicit use of a hard register, then we must reload the
3003 other non-hard register operand. */
3004 if (HARD_REGISTER_P (operand_reg[i])
3005 || (first_conflict_j == last_conflict_j
3006 && operand_reg[last_conflict_j] != NULL_RTX
3007 && !curr_alt_match_win[last_conflict_j]
3008 && !HARD_REGISTER_P (operand_reg[last_conflict_j])))
1c86bd80 3009 {
2194f7a2
VM
3010 curr_alt_win[last_conflict_j] = false;
3011 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++]
3012 = last_conflict_j;
1c86bd80 3013 losers++;
cb1cca12
VM
3014 if (lra_dump_file != NULL)
3015 fprintf
3016 (lra_dump_file,
3017 " %d Conflict early clobber reload: reject--\n",
3018 i);
1c86bd80 3019 }
55a2c322
VM
3020 else
3021 {
1c86bd80
VM
3022 /* We need to reload early clobbered register and the
3023 matched registers. */
3024 for (j = 0; j < n_operands; j++)
3025 if (curr_alt_matches[j] == i)
3026 {
3027 curr_alt_match_win[j] = false;
3028 losers++;
3029 overall += LRA_LOSER_COST_FACTOR;
3030 }
3031 if (! curr_alt_match_win[i])
3032 curr_alt_dont_inherit_ops[curr_alt_dont_inherit_ops_num++] = i;
3033 else
3034 {
3035 /* Remember pseudos used for match reloads are never
3036 inherited. */
3037 lra_assert (curr_alt_matches[i] >= 0);
3038 curr_alt_win[curr_alt_matches[i]] = false;
3039 }
3040 curr_alt_win[i] = curr_alt_match_win[i] = false;
3041 losers++;
cb1cca12
VM
3042 if (lra_dump_file != NULL)
3043 fprintf
3044 (lra_dump_file,
aa326bfb 3045 " %d Matched conflict early clobber reloads: "
cb1cca12
VM
3046 "reject--\n",
3047 i);
dbe7895c
AS
3048 }
3049 /* Early clobber was already reflected in REJECT. */
3050 if (!matching_early_clobber[i])
3051 {
3052 lra_assert (reject > 0);
deca73f5 3053 reject--;
dbe7895c 3054 matching_early_clobber[i] = 1;
55a2c322 3055 }
dbe7895c 3056 overall += LRA_LOSER_COST_FACTOR - 1;
55a2c322 3057 }
deca73f5 3058 if (lra_dump_file != NULL)
36ff9dfb
VM
3059 fprintf (lra_dump_file, " alt=%d,overall=%d,losers=%d,rld_nregs=%d\n",
3060 nalt, overall, losers, reload_nregs);
deca73f5 3061
55a2c322
VM
3062 /* If this alternative can be made to work by reloading, and it
3063 needs less reloading than the others checked so far, record
3064 it as the chosen goal for reloading. */
3065 if ((best_losers != 0 && losers == 0)
3066 || (((best_losers == 0 && losers == 0)
3067 || (best_losers != 0 && losers != 0))
3068 && (best_overall > overall
3069 || (best_overall == overall
3070 /* If the cost of the reloads is the same,
3071 prefer alternative which requires minimal
36ff9dfb
VM
3072 number of reload regs. */
3073 && (reload_nregs < best_reload_nregs
3074 || (reload_nregs == best_reload_nregs
f15feaf9
VM
3075 && (best_reload_sum < reload_sum
3076 || (best_reload_sum == reload_sum
3077 && nalt < goal_alt_number))))))))
55a2c322
VM
3078 {
3079 for (nop = 0; nop < n_operands; nop++)
3080 {
3081 goal_alt_win[nop] = curr_alt_win[nop];
3082 goal_alt_match_win[nop] = curr_alt_match_win[nop];
3083 goal_alt_matches[nop] = curr_alt_matches[nop];
3084 goal_alt[nop] = curr_alt[nop];
3085 goal_alt_offmemok[nop] = curr_alt_offmemok[nop];
3086 }
3087 goal_alt_dont_inherit_ops_num = curr_alt_dont_inherit_ops_num;
3088 for (nop = 0; nop < curr_alt_dont_inherit_ops_num; nop++)
3089 goal_alt_dont_inherit_ops[nop] = curr_alt_dont_inherit_ops[nop];
3090 goal_alt_swapped = curr_swapped;
3091 best_overall = overall;
3092 best_losers = losers;
55a2c322
VM
3093 best_reload_nregs = reload_nregs;
3094 best_reload_sum = reload_sum;
3095 goal_alt_number = nalt;
3096 }
3097 if (losers == 0)
3098 /* Everything is satisfied. Do not process alternatives
f4eafc30 3099 anymore. */
55a2c322
VM
3100 break;
3101 fail:
3102 ;
3103 }
3104 return ok_p;
3105}
3106
/* Make reload base reg from address AD.

   Create a new base-class pseudo, emit an insn setting it from AD's
   base, and return the RTL sum "new pseudo + displacement" (const0_rtx
   stands in when AD has no displacement term).  Return NULL_RTX and
   delete any emitted insns if either the resulting inner address is
   invalid or the base-setting insn is not recognized.  */
static rtx
base_to_reg (struct address_info *ad)
{
  enum reg_class cl;
  int code = -1;
  rtx new_inner = NULL_RTX;
  rtx new_reg = NULL_RTX;
  rtx_insn *insn;
  /* Remember the current last insn so a failed attempt can be undone
     with delete_insns_since.  */
  rtx_insn *last_insn = get_last_insn();

  lra_assert (ad->disp == ad->disp_term);
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
		       get_index_code (ad));
  new_reg = lra_create_new_reg (GET_MODE (*ad->base), NULL_RTX,
				cl, "base");
  /* Candidate replacement: new_reg + disp_term (or + 0 if none).  */
  new_inner = simplify_gen_binary (PLUS, GET_MODE (new_reg), new_reg,
				   ad->disp_term == NULL
				   ? const0_rtx
				   : *ad->disp_term);
  /* Validity is checked before emitting anything, so no cleanup is
     needed on this path.  */
  if (!valid_address_p (ad->mode, new_inner, ad->as))
    return NULL_RTX;
  insn = emit_insn (gen_rtx_SET (new_reg, *ad->base));
  code = recog_memoized (insn);
  if (code < 0)
    {
      /* The base-setting insn does not match any pattern: undo it.  */
      delete_insns_since (last_insn);
      return NULL_RTX;
    }

  return new_inner;
}
3139
/* Make reload base reg + DISP from address AD.  Return the new pseudo.

   A new pseudo of the base register class is created and an addition
   "new pseudo = base + DISP" is emitted via lra_emit_add.  */
static rtx
base_plus_disp_to_reg (struct address_info *ad, rtx disp)
{
  enum reg_class cl;
  rtx new_reg;

  lra_assert (ad->base == ad->base_term);
  cl = base_reg_class (ad->mode, ad->as, ad->base_outer_code,
		       get_index_code (ad));
  new_reg = lra_create_new_reg (GET_MODE (*ad->base_term), NULL_RTX,
				cl, "base + disp");
  lra_emit_add (new_reg, *ad->base_term, disp);
  return new_reg;
}
3155
/* Make reload of index part of address AD.  Return the new
   pseudo.

   The index term is multiplied by the address's index scale into a
   fresh INDEX_REG_CLASS pseudo via expand_mult.  */
static rtx
index_part_to_reg (struct address_info *ad)
{
  rtx new_reg;

  new_reg = lra_create_new_reg (GET_MODE (*ad->index), NULL_RTX,
				INDEX_REG_CLASS, "index term");
  expand_mult (GET_MODE (*ad->index), *ad->index_term,
	       GEN_INT (get_index_scale (ad)), new_reg, 1);
  return new_reg;
}
3169
277f65de
RS
3170/* Return true if we can add a displacement to address AD, even if that
3171 makes the address invalid. The fix-up code requires any new address
3172 to be the sum of the BASE_TERM, INDEX and DISP_TERM fields. */
02ea4bf4 3173static bool
277f65de 3174can_add_disp_p (struct address_info *ad)
02ea4bf4 3175{
277f65de
RS
3176 return (!ad->autoinc_p
3177 && ad->segment == NULL
3178 && ad->base == ad->base_term
3179 && ad->disp == ad->disp_term);
02ea4bf4
RS
3180}
3181
/* Make equiv substitution in address AD.  Return true if a substitution
   was made.

   The base and index registers (looking through subregs) are replaced
   by their equivalences after elimination.  An equivalence of the form
   "reg + constant" folds the constant into the accumulated
   displacement DISP when can_add_disp_p allows it; the displacement is
   then merged into AD's disp field (or into the inner address,
   followed by re-decomposition via update_address).  */
static bool
equiv_address_substitution (struct address_info *ad)
{
  rtx base_reg, new_base_reg, index_reg, new_index_reg, *base_term, *index_term;
  poly_int64 disp;
  HOST_WIDE_INT scale;
  bool change_p;

  base_term = strip_subreg (ad->base_term);
  if (base_term == NULL)
    base_reg = new_base_reg = NULL_RTX;
  else
    {
      base_reg = *base_term;
      new_base_reg = get_equiv_with_elimination (base_reg, curr_insn);
    }
  index_term = strip_subreg (ad->index_term);
  if (index_term == NULL)
    index_reg = new_index_reg = NULL_RTX;
  else
    {
      index_reg = *index_term;
      new_index_reg = get_equiv_with_elimination (index_reg, curr_insn);
    }
  /* Nothing to substitute: both terms already equal their equivalences.  */
  if (base_reg == new_base_reg && index_reg == new_index_reg)
    return false;
  disp = 0;
  change_p = false;
  if (lra_dump_file != NULL)
    {
      fprintf (lra_dump_file, "Changing address in insn %d ",
	       INSN_UID (curr_insn));
      dump_value_slim (lra_dump_file, *ad->outer, 1);
    }
  if (base_reg != new_base_reg)
    {
      poly_int64 offset;
      if (REG_P (new_base_reg))
	{
	  /* Simple register equivalence: substitute in place.  */
	  *base_term = new_base_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_base_reg) == PLUS
	       && REG_P (XEXP (new_base_reg, 0))
	       && poly_int_rtx_p (XEXP (new_base_reg, 1), &offset)
	       && can_add_disp_p (ad))
	{
	  /* "reg + offset" equivalence: keep the reg, fold the offset
	     into the pending displacement.  */
	  disp += offset;
	  *base_term = XEXP (new_base_reg, 0);
	  change_p = true;
	}
      /* Keep a duplicated base term (e.g. in auto-inc addresses) in
	 sync with the primary one.  */
      if (ad->base_term2 != NULL)
	*ad->base_term2 = *ad->base_term;
    }
  if (index_reg != new_index_reg)
    {
      poly_int64 offset;
      if (REG_P (new_index_reg))
	{
	  *index_term = new_index_reg;
	  change_p = true;
	}
      else if (GET_CODE (new_index_reg) == PLUS
	       && REG_P (XEXP (new_index_reg, 0))
	       && poly_int_rtx_p (XEXP (new_index_reg, 1), &offset)
	       && can_add_disp_p (ad)
	       /* A zero scale (unknown/unsupported index form) blocks
		  the folding.  */
	       && (scale = get_index_scale (ad)))
	{
	  /* The index is scaled, so its equivalence offset must be
	     scaled as well before joining the displacement.  */
	  disp += offset * scale;
	  *index_term = XEXP (new_index_reg, 0);
	  change_p = true;
	}
    }
  if (maybe_ne (disp, 0))
    {
      if (ad->disp != NULL)
	*ad->disp = plus_constant (GET_MODE (*ad->inner), *ad->disp, disp);
      else
	{
	  /* No displacement slot exists: add it to the inner address
	     and re-decompose AD to reflect the new shape.  */
	  *ad->inner = plus_constant (GET_MODE (*ad->inner), *ad->inner, disp);
	  update_address (ad);
	}
      change_p = true;
    }
  if (lra_dump_file != NULL)
    {
      if (! change_p)
	fprintf (lra_dump_file, " -- no change\n");
      else
	{
	  fprintf (lra_dump_file, " on equiv ");
	  dump_value_slim (lra_dump_file, *ad->outer, 1);
	  fprintf (lra_dump_file, "\n");
	}
    }
  return change_p;
}
3281
d9cf932c
VM
3282/* Major function to make reloads for an address in operand NOP or
3283 check its correctness (If CHECK_ONLY_P is true). The supported
3284 cases are:
bd3d34d4 3285
5a107a0f
VM
3286 1) an address that existed before LRA started, at which point it
3287 must have been valid. These addresses are subject to elimination
3288 and may have become invalid due to the elimination offset being out
3289 of range.
bd3d34d4 3290
5a107a0f
VM
3291 2) an address created by forcing a constant to memory
3292 (force_const_to_mem). The initial form of these addresses might
3293 not be valid, and it is this function's job to make them valid.
bd3d34d4
RS
3294
3295 3) a frame address formed from a register and a (possibly zero)
5a107a0f
VM
3296 constant offset. As above, these addresses might not be valid and
3297 this function must make them so.
bd3d34d4
RS
3298
3299 Add reloads to the lists *BEFORE and *AFTER. We might need to add
55a2c322 3300 reloads to *AFTER because of inc/dec, {pre, post} modify in the
cc8849a1
VM
3301 address. Return true for any RTL change.
3302
3303 The function is a helper function which does not produce all
d9cf932c
VM
3304 transformations (when CHECK_ONLY_P is false) which can be
3305 necessary. It does just basic steps. To do all necessary
3306 transformations use function process_address. */
55a2c322 3307static bool
d9cf932c
VM
3308process_address_1 (int nop, bool check_only_p,
3309 rtx_insn **before, rtx_insn **after)
55a2c322 3310{
277f65de
RS
3311 struct address_info ad;
3312 rtx new_reg;
bc2fc1f3 3313 HOST_WIDE_INT scale;
55a2c322
VM
3314 rtx op = *curr_id->operand_loc[nop];
3315 const char *constraint = curr_static_id->operand[nop].constraint;
777e635f 3316 enum constraint_num cn = lookup_constraint (constraint);
d9cf932c 3317 bool change_p = false;
55a2c322 3318
823bb054
SB
3319 if (MEM_P (op)
3320 && GET_MODE (op) == BLKmode
3321 && GET_CODE (XEXP (op, 0)) == SCRATCH)
3322 return false;
3323
998fd141
AO
3324 if (insn_extra_address_constraint (cn)
3325 /* When we find an asm operand with an address constraint that
3326 doesn't satisfy address_operand to begin with, we clear
3327 is_address, so that we don't try to make a non-address fit.
3328 If the asm statement got this far, it's because other
3329 constraints are available, and we'll use them, disregarding
3330 the unsatisfiable address ones. */
3331 && curr_static_id->operand[nop].is_address)
277f65de 3332 decompose_lea_address (&ad, curr_id->operand_loc[nop]);
164f0634
EB
3333 /* Do not attempt to decompose arbitrary addresses generated by combine
3334 for asm operands with loose constraints, e.g 'X'. */
3335 else if (MEM_P (op)
a81a0bfa
TP
3336 && !(INSN_CODE (curr_insn) < 0
3337 && get_constraint_type (cn) == CT_FIXED_FORM
164f0634 3338 && constraint_satisfied_p (op, cn)))
277f65de 3339 decompose_mem_address (&ad, op);
55a2c322
VM
3340 else if (GET_CODE (op) == SUBREG
3341 && MEM_P (SUBREG_REG (op)))
277f65de 3342 decompose_mem_address (&ad, SUBREG_REG (op));
55a2c322
VM
3343 else
3344 return false;
70712859
KK
3345 /* If INDEX_REG_CLASS is assigned to base_term already and isn't to
3346 index_term, swap them so to avoid assigning INDEX_REG_CLASS to both
3347 when INDEX_REG_CLASS is a single register class. */
3348 if (ad.base_term != NULL
3349 && ad.index_term != NULL
3350 && ira_class_hard_regs_num[INDEX_REG_CLASS] == 1
3351 && REG_P (*ad.base_term)
3352 && REG_P (*ad.index_term)
3353 && in_class_p (*ad.base_term, INDEX_REG_CLASS, NULL)
3354 && ! in_class_p (*ad.index_term, INDEX_REG_CLASS, NULL))
3355 {
3356 std::swap (ad.base, ad.index);
3357 std::swap (ad.base_term, ad.index_term);
3358 }
d9cf932c
VM
3359 if (! check_only_p)
3360 change_p = equiv_address_substitution (&ad);
277f65de 3361 if (ad.base_term != NULL
55a2c322 3362 && (process_addr_reg
d9cf932c 3363 (ad.base_term, check_only_p, before,
277f65de
RS
3364 (ad.autoinc_p
3365 && !(REG_P (*ad.base_term)
3366 && find_regno_note (curr_insn, REG_DEAD,
3367 REGNO (*ad.base_term)) != NULL_RTX)
55a2c322 3368 ? after : NULL),
277f65de
RS
3369 base_reg_class (ad.mode, ad.as, ad.base_outer_code,
3370 get_index_code (&ad)))))
55a2c322
VM
3371 {
3372 change_p = true;
277f65de
RS
3373 if (ad.base_term2 != NULL)
3374 *ad.base_term2 = *ad.base_term;
55a2c322 3375 }
277f65de 3376 if (ad.index_term != NULL
d9cf932c
VM
3377 && process_addr_reg (ad.index_term, check_only_p,
3378 before, NULL, INDEX_REG_CLASS))
55a2c322
VM
3379 change_p = true;
3380
777e635f
RS
3381 /* Target hooks sometimes don't treat extra-constraint addresses as
3382 legitimate address_operands, so handle them specially. */
8677664e 3383 if (insn_extra_address_constraint (cn)
777e635f 3384 && satisfies_address_constraint_p (&ad, cn))
2c62cbaa 3385 return change_p;
2c62cbaa 3386
d9cf932c
VM
3387 if (check_only_p)
3388 return change_p;
3389
277f65de 3390 /* There are three cases where the shape of *AD.INNER may now be invalid:
bd3d34d4
RS
3391
3392 1) the original address was valid, but either elimination or
5a107a0f
VM
3393 equiv_address_substitution was applied and that made
3394 the address invalid.
bd3d34d4
RS
3395
3396 2) the address is an invalid symbolic address created by
5a107a0f 3397 force_const_to_mem.
bd3d34d4
RS
3398
3399 3) the address is a frame address with an invalid offset.
3400
c31d2d11
RS
3401 4) the address is a frame address with an invalid base.
3402
2c62cbaa
VM
3403 All these cases involve a non-autoinc address, so there is no
3404 point revalidating other types. */
3405 if (ad.autoinc_p || valid_address_p (&ad))
55a2c322
VM
3406 return change_p;
3407
bd3d34d4
RS
3408 /* Any index existed before LRA started, so we can assume that the
3409 presence and shape of the index is valid. */
55a2c322 3410 push_to_sequence (*before);
2c62cbaa 3411 lra_assert (ad.disp == ad.disp_term);
277f65de 3412 if (ad.base == NULL)
55a2c322 3413 {
277f65de 3414 if (ad.index == NULL)
55a2c322 3415 {
95831c01
VM
3416 rtx_insn *insn;
3417 rtx_insn *last = get_last_insn ();
55a2c322 3418 int code = -1;
277f65de
RS
3419 enum reg_class cl = base_reg_class (ad.mode, ad.as,
3420 SCRATCH, SCRATCH);
2c62cbaa 3421 rtx addr = *ad.inner;
277f65de 3422
2c62cbaa 3423 new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
d0b2266a
TS
3424 if (HAVE_lo_sum)
3425 {
d0b2266a
TS
3426 /* addr => lo_sum (new_base, addr), case (2) above. */
3427 insn = emit_insn (gen_rtx_SET
3428 (new_reg,
3429 gen_rtx_HIGH (Pmode, copy_rtx (addr))));
3430 code = recog_memoized (insn);
3431 if (code >= 0)
3432 {
3433 *ad.inner = gen_rtx_LO_SUM (Pmode, new_reg, addr);
3434 if (! valid_address_p (ad.mode, *ad.outer, ad.as))
3435 {
3436 /* Try to put lo_sum into register. */
3437 insn = emit_insn (gen_rtx_SET
3438 (new_reg,
3439 gen_rtx_LO_SUM (Pmode, new_reg, addr)));
3440 code = recog_memoized (insn);
3441 if (code >= 0)
3442 {
3443 *ad.inner = new_reg;
3444 if (! valid_address_p (ad.mode, *ad.outer, ad.as))
3445 {
3446 *ad.inner = addr;
3447 code = -1;
3448 }
3449 }
3450
3451 }
3452 }
3453 if (code < 0)
3454 delete_insns_since (last);
3455 }
3456
55a2c322
VM
3457 if (code < 0)
3458 {
2c62cbaa
VM
3459 /* addr => new_base, case (2) above. */
3460 lra_emit_move (new_reg, addr);
95831c01
VM
3461
3462 for (insn = last == NULL_RTX ? get_insns () : NEXT_INSN (last);
3463 insn != NULL_RTX;
3464 insn = NEXT_INSN (insn))
3465 if (recog_memoized (insn) < 0)
3466 break;
3467 if (insn != NULL_RTX)
3468 {
3469 /* Do nothing if we cannot generate right insns.
9c582551 3470 This is analogous to reload pass behavior. */
95831c01
VM
3471 delete_insns_since (last);
3472 end_sequence ();
3473 return false;
3474 }
2c62cbaa 3475 *ad.inner = new_reg;
55a2c322
VM
3476 }
3477 }
3478 else
3479 {
bd3d34d4
RS
3480 /* index * scale + disp => new base + index * scale,
3481 case (1) above. */
277f65de
RS
3482 enum reg_class cl = base_reg_class (ad.mode, ad.as, PLUS,
3483 GET_CODE (*ad.index));
55a2c322
VM
3484
3485 lra_assert (INDEX_REG_CLASS != NO_REGS);
3486 new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "disp");
277f65de
RS
3487 lra_emit_move (new_reg, *ad.disp);
3488 *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
3489 new_reg, *ad.index);
55a2c322
VM
3490 }
3491 }
277f65de 3492 else if (ad.index == NULL)
55a2c322 3493 {
5a107a0f
VM
3494 int regno;
3495 enum reg_class cl;
cfa434f6
DM
3496 rtx set;
3497 rtx_insn *insns, *last_insn;
c31d2d11
RS
3498 /* Try to reload base into register only if the base is invalid
3499 for the address but with valid offset, case (4) above. */
3500 start_sequence ();
3501 new_reg = base_to_reg (&ad);
3502
bd3d34d4 3503 /* base + disp => new base, cases (1) and (3) above. */
55a2c322
VM
3504 /* Another option would be to reload the displacement into an
3505 index register. However, postreload has code to optimize
3506 address reloads that have the same base and different
3507 displacements, so reloading into an index register would
3508 not necessarily be a win. */
c31d2d11 3509 if (new_reg == NULL_RTX)
9005477f
RS
3510 {
3511 /* See if the target can split the displacement into a
3512 legitimate new displacement from a local anchor. */
3513 gcc_assert (ad.disp == ad.disp_term);
3514 poly_int64 orig_offset;
3515 rtx offset1, offset2;
3516 if (poly_int_rtx_p (*ad.disp, &orig_offset)
3517 && targetm.legitimize_address_displacement (&offset1, &offset2,
3518 orig_offset,
3519 ad.mode))
3520 {
3521 new_reg = base_plus_disp_to_reg (&ad, offset1);
3522 new_reg = gen_rtx_PLUS (GET_MODE (new_reg), new_reg, offset2);
3523 }
3524 else
3525 new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
3526 }
5a107a0f
VM
3527 insns = get_insns ();
3528 last_insn = get_last_insn ();
3529 /* If we generated at least two insns, try last insn source as
3530 an address. If we succeed, we generate one less insn. */
9005477f
RS
3531 if (REG_P (new_reg)
3532 && last_insn != insns
3533 && (set = single_set (last_insn)) != NULL_RTX
5a107a0f
VM
3534 && GET_CODE (SET_SRC (set)) == PLUS
3535 && REG_P (XEXP (SET_SRC (set), 0))
3536 && CONSTANT_P (XEXP (SET_SRC (set), 1)))
3537 {
3538 *ad.inner = SET_SRC (set);
3539 if (valid_address_p (ad.mode, *ad.outer, ad.as))
3540 {
3541 *ad.base_term = XEXP (SET_SRC (set), 0);
3542 *ad.disp_term = XEXP (SET_SRC (set), 1);
3543 cl = base_reg_class (ad.mode, ad.as, ad.base_outer_code,
3544 get_index_code (&ad));
3545 regno = REGNO (*ad.base_term);
3546 if (regno >= FIRST_PSEUDO_REGISTER
3547 && cl != lra_get_allocno_class (regno))
a2d0d374 3548 lra_change_class (regno, cl, " Change to", true);
5a107a0f
VM
3549 new_reg = SET_SRC (set);
3550 delete_insns_since (PREV_INSN (last_insn));
3551 }
3552 }
3553 end_sequence ();
3554 emit_insn (insns);
277f65de 3555 *ad.inner = new_reg;
55a2c322 3556 }
6e071b1e 3557 else if (ad.disp_term != NULL)
55a2c322 3558 {
bd3d34d4
RS
3559 /* base + scale * index + disp => new base + scale * index,
3560 case (1) above. */
9005477f
RS
3561 gcc_assert (ad.disp == ad.disp_term);
3562 new_reg = base_plus_disp_to_reg (&ad, *ad.disp);
277f65de
RS
3563 *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
3564 new_reg, *ad.index);
55a2c322 3565 }
bc2fc1f3 3566 else if ((scale = get_index_scale (&ad)) == 1)
5a770e01
VM
3567 {
3568 /* The last transformation to one reg will be made in
3569 curr_insn_transform function. */
3570 end_sequence ();
3571 return false;
3572 }
bc2fc1f3 3573 else if (scale != 0)
6e071b1e
VM
3574 {
3575 /* base + scale * index => base + new_reg,
3576 case (1) above.
3577 Index part of address may become invalid. For example, we
3578 changed pseudo on the equivalent memory and a subreg of the
3579 pseudo onto the memory of different mode for which the scale is
3580 prohibitted. */
3581 new_reg = index_part_to_reg (&ad);
3582 *ad.inner = simplify_gen_binary (PLUS, GET_MODE (new_reg),
3583 *ad.base_term, new_reg);
3584 }
bc2fc1f3
VM
3585 else
3586 {
3587 enum reg_class cl = base_reg_class (ad.mode, ad.as,
3588 SCRATCH, SCRATCH);
3589 rtx addr = *ad.inner;
3590
3591 new_reg = lra_create_new_reg (Pmode, NULL_RTX, cl, "addr");
3592 /* addr => new_base. */
3593 lra_emit_move (new_reg, addr);
3594 *ad.inner = new_reg;
3595 }
55a2c322
VM
3596 *before = get_insns ();
3597 end_sequence ();
3598 return true;
3599}
3600
d9cf932c
VM
3601/* If CHECK_ONLY_P is false, do address reloads until it is necessary.
3602 Use process_address_1 as a helper function. Return true for any
3603 RTL changes.
3604
3605 If CHECK_ONLY_P is true, just check address correctness. Return
3606 false if the address correct. */
cc8849a1 3607static bool
d9cf932c
VM
3608process_address (int nop, bool check_only_p,
3609 rtx_insn **before, rtx_insn **after)
cc8849a1
VM
3610{
3611 bool res = false;
3612
d9cf932c
VM
3613 while (process_address_1 (nop, check_only_p, before, after))
3614 {
3615 if (check_only_p)
3616 return true;
3617 res = true;
3618 }
cc8849a1
VM
3619 return res;
3620}
3621
55a2c322
VM
/* Emit insns to reload VALUE into a new register.  VALUE is an
   auto-increment or auto-decrement RTX whose operand is a register or
   memory location; so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload
   value being incremented/decremented from.

   INC_AMOUNT is the number to increment or decrement by (always
   positive and ignored for POST_MODIFY/PRE_MODIFY).

   NEW_RCLASS is the register class for any reload pseudo created here.

   Return pseudo containing the result.  */
static rtx
emit_inc (enum reg_class new_rclass, rtx in, rtx value, poly_int64 inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  rtx inc;
  rtx_insn *add_insn;
  int code;
  rtx real_in = in == value ? incloc : in;
  rtx result;
  bool plus_p = true;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* For {PRE,POST}_MODIFY the increment is explicit in the RTL:
	 (plus/minus incloc inc).  Extract it and note the sign.  */
      lra_assert (GET_CODE (XEXP (value, 1)) == PLUS
		  || GET_CODE (XEXP (value, 1)) == MINUS);
      lra_assert (rtx_equal_p (XEXP (XEXP (value, 1), 0), XEXP (value, 0)));
      plus_p = GET_CODE (XEXP (value, 1)) == PLUS;
      inc = XEXP (XEXP (value, 1), 1);
    }
  else
    {
      /* Plain {PRE,POST}_{INC,DEC}: the amount comes from the caller;
	 negate it for the decrement forms.  */
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = gen_int_mode (inc_amount, GET_MODE (value));
    }

  /* For pre-increment of a register we can work in place; otherwise
     create a fresh reload pseudo to hold the result.  */
  if (! post && REG_P (incloc))
    result = incloc;
  else
    result = lra_create_new_reg (GET_MODE (value), value, new_rclass,
				 "INC/DEC result");

  if (real_in != result)
    {
      /* First copy the location to the result register.  */
      lra_assert (REG_P (result));
      emit_insn (gen_move_insn (result, real_in));
    }

  /* We suppose that there are insns to add/sub with the constant
     increment permitted in {PRE/POST)_{DEC/INC/MODIFY}.  At least the
     old reload worked with this assumption.  If the assumption
     becomes wrong, we should use approach in function
     base_plus_disp_to_reg.  */
  if (in == value)
    {
      /* See if we can directly increment INCLOC.  */
      last = get_last_insn ();
      add_insn = emit_insn (plus_p
			    ? gen_add2_insn (incloc, inc)
			    : gen_sub2_insn (incloc, inc));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  /* Direct increment recognized; for pre-increment also copy
	     the updated location into the result pseudo.  */
	  if (! post && result != incloc)
	    emit_insn (gen_move_insn (result, incloc));
	  return result;
	}
      /* The target did not recognize the add/sub on INCLOC; discard
	 the tentative insns and fall back to incrementing in RESULT.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RESULT.
     The way we do this depends on whether this is pre- or
     post-increment.  For pre-increment, copy INCLOC to the reload
     register, increment it there, then save back.  */
  if (! post)
    {
      if (real_in != result)
	emit_insn (gen_move_insn (result, real_in));
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      if (result != incloc)
	emit_insn (gen_move_insn (incloc, result));
    }
  else
    {
      /* Post-increment.

	 Because this might be a jump insn or a compare, and because
	 RESULT may not be available after the insn in an input
	 reload, we must do the incrementing before the insn being
	 reloaded for.

	 We have already copied IN to RESULT.  Increment the copy in
	 RESULT, save that back, then decrement RESULT so it has
	 the original value.  */
      if (plus_p)
	emit_insn (gen_add2_insn (result, inc));
      else
	emit_insn (gen_sub2_insn (result, inc));
      emit_insn (gen_move_insn (incloc, result));
      /* Restore non-modified value for the result.  We prefer this
	 way because it does not require an additional hard
	 register.  */
      if (plus_p)
	{
	  poly_int64 offset;
	  /* Undo the addition: subtract by adding the negated constant
	     when INC is a known constant, otherwise emit a subtract.  */
	  if (poly_int_rtx_p (inc, &offset))
	    emit_insn (gen_add2_insn (result,
				      gen_int_mode (-offset,
						    GET_MODE (result))));
	  else
	    emit_insn (gen_sub2_insn (result, inc));
	}
      else
	emit_insn (gen_add2_insn (result, inc));
    }
  return result;
}
3750
2c62cbaa
VM
3751/* Return true if the current move insn does not need processing as we
3752 already know that it satisfies its constraints. */
3753static bool
3754simple_move_p (void)
3755{
3756 rtx dest, src;
3757 enum reg_class dclass, sclass;
3758
3759 lra_assert (curr_insn_set != NULL_RTX);
3760 dest = SET_DEST (curr_insn_set);
3761 src = SET_SRC (curr_insn_set);
2008be40
SB
3762
3763 /* If the instruction has multiple sets we need to process it even if it
3764 is single_set. This can happen if one or more of the SETs are dead.
3765 See PR73650. */
3766 if (multiple_sets (curr_insn))
3767 return false;
3768
2c62cbaa
VM
3769 return ((dclass = get_op_class (dest)) != NO_REGS
3770 && (sclass = get_op_class (src)) != NO_REGS
3771 /* The backend guarantees that register moves of cost 2
3772 never need reloads. */
03b9b5ce 3773 && targetm.register_move_cost (GET_MODE (src), sclass, dclass) == 2);
2c62cbaa
VM
3774 }
3775
55a2c322
VM
3776/* Swap operands NOP and NOP + 1. */
3777static inline void
3778swap_operands (int nop)
3779{
fab27f52
MM
3780 std::swap (curr_operand_mode[nop], curr_operand_mode[nop + 1]);
3781 std::swap (original_subreg_reg_mode[nop], original_subreg_reg_mode[nop + 1]);
3782 std::swap (*curr_id->operand_loc[nop], *curr_id->operand_loc[nop + 1]);
987b67f1 3783 std::swap (equiv_substition_p[nop], equiv_substition_p[nop + 1]);
55a2c322
VM
3784 /* Swap the duplicates too. */
3785 lra_update_dup (curr_id, nop);
3786 lra_update_dup (curr_id, nop + 1);
3787}
3788
3789/* Main entry point of the constraint code: search the body of the
3790 current insn to choose the best alternative. It is mimicking insn
3791 alternative cost calculation model of former reload pass. That is
3792 because machine descriptions were written to use this model. This
3793 model can be changed in future. Make commutative operand exchange
3794 if it is chosen.
3795
d9cf932c
VM
3796 if CHECK_ONLY_P is false, do RTL changes to satisfy the
3797 constraints. Return true if any change happened during function
3798 call.
3799
3800 If CHECK_ONLY_P is true then don't do any transformation. Just
3801 check that the insn satisfies all constraints. If the insn does
3802 not satisfy any constraint, return true. */
55a2c322 3803static bool
d9cf932c 3804curr_insn_transform (bool check_only_p)
55a2c322
VM
3805{
3806 int i, j, k;
3807 int n_operands;
3808 int n_alternatives;
aefae0f1 3809 int n_outputs;
55a2c322
VM
3810 int commutative;
3811 signed char goal_alt_matched[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
511dcace 3812 signed char match_inputs[MAX_RECOG_OPERANDS + 1];
aefae0f1 3813 signed char outputs[MAX_RECOG_OPERANDS + 1];
cfa434f6 3814 rtx_insn *before, *after;
55a2c322
VM
3815 bool alt_p = false;
3816 /* Flag that the insn has been changed through a transformation. */
3817 bool change_p;
3818 bool sec_mem_p;
55a2c322 3819 bool use_sec_mem_p;
55a2c322
VM
3820 int max_regno_before;
3821 int reused_alternative_num;
3822
2c62cbaa
VM
3823 curr_insn_set = single_set (curr_insn);
3824 if (curr_insn_set != NULL_RTX && simple_move_p ())
7874b7c5
VM
3825 {
3826 /* We assume that the corresponding insn alternative has no
3827 earlier clobbers. If it is not the case, don't define move
3828 cost equal to 2 for the corresponding register classes. */
3829 lra_set_used_insn_alternative (curr_insn, LRA_NON_CLOBBERED_ALT);
3830 return false;
3831 }
2c62cbaa 3832
55a2c322
VM
3833 no_input_reloads_p = no_output_reloads_p = false;
3834 goal_alt_number = -1;
2c62cbaa 3835 change_p = sec_mem_p = false;
55a2c322
VM
3836 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output
3837 reloads; neither are insns that SET cc0. Insns that use CC0 are
3838 not allowed to have any input reloads. */
3839 if (JUMP_P (curr_insn) || CALL_P (curr_insn))
3840 no_output_reloads_p = true;
3841
058eb3b0 3842 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
55a2c322 3843 no_input_reloads_p = true;
058eb3b0 3844 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (curr_insn)))
55a2c322 3845 no_output_reloads_p = true;
55a2c322
VM
3846
3847 n_operands = curr_static_id->n_operands;
3848 n_alternatives = curr_static_id->n_alternatives;
3849
3850 /* Just return "no reloads" if insn has no operands with
3851 constraints. */
3852 if (n_operands == 0 || n_alternatives == 0)
3853 return false;
3854
3855 max_regno_before = max_reg_num ();
3856
3857 for (i = 0; i < n_operands; i++)
3858 {
3859 goal_alt_matched[i][0] = -1;
3860 goal_alt_matches[i] = -1;
3861 }
3862
3863 commutative = curr_static_id->commutative;
3864
3865 /* Now see what we need for pseudos that didn't get hard regs or got
3866 the wrong kind of hard reg. For this, we must consider all the
3867 operands together against the register constraints. */
3868
821b7577 3869 best_losers = best_overall = INT_MAX;
36ff9dfb 3870 best_reload_sum = 0;
55a2c322
VM
3871
3872 curr_swapped = false;
3873 goal_alt_swapped = false;
3874
d9cf932c
VM
3875 if (! check_only_p)
3876 /* Make equivalence substitution and memory subreg elimination
3877 before address processing because an address legitimacy can
3878 depend on memory mode. */
3879 for (i = 0; i < n_operands; i++)
3880 {
0b87be09 3881 rtx op, subst, old;
d9cf932c 3882 bool op_change_p = false;
0b87be09
VM
3883
3884 if (curr_static_id->operand[i].is_operator)
3885 continue;
d9cf932c 3886
0b87be09 3887 old = op = *curr_id->operand_loc[i];
d9cf932c
VM
3888 if (GET_CODE (old) == SUBREG)
3889 old = SUBREG_REG (old);
3890 subst = get_equiv_with_elimination (old, curr_insn);
895ff86f 3891 original_subreg_reg_mode[i] = VOIDmode;
987b67f1 3892 equiv_substition_p[i] = false;
d9cf932c
VM
3893 if (subst != old)
3894 {
987b67f1 3895 equiv_substition_p[i] = true;
d9cf932c
VM
3896 subst = copy_rtx (subst);
3897 lra_assert (REG_P (old));
895ff86f 3898 if (GET_CODE (op) != SUBREG)
d9cf932c 3899 *curr_id->operand_loc[i] = subst;
895ff86f
VM
3900 else
3901 {
3902 SUBREG_REG (op) = subst;
3903 if (GET_MODE (subst) == VOIDmode)
3904 original_subreg_reg_mode[i] = GET_MODE (old);
3905 }
d9cf932c
VM
3906 if (lra_dump_file != NULL)
3907 {
3908 fprintf (lra_dump_file,
3909 "Changing pseudo %d in operand %i of insn %u on equiv ",
3910 REGNO (old), i, INSN_UID (curr_insn));
3911 dump_value_slim (lra_dump_file, subst, 1);
895ff86f 3912 fprintf (lra_dump_file, "\n");
d9cf932c
VM
3913 }
3914 op_change_p = change_p = true;
3915 }
3916 if (simplify_operand_subreg (i, GET_MODE (old)) || op_change_p)
3917 {
3918 change_p = true;
3919 lra_update_dup (curr_id, i);
3920 }
3921 }
55a2c322
VM
3922
3923 /* Reload address registers and displacements. We do it before
3924 finding an alternative because of memory constraints. */
cfa434f6 3925 before = after = NULL;
55a2c322
VM
3926 for (i = 0; i < n_operands; i++)
3927 if (! curr_static_id->operand[i].is_operator
d9cf932c 3928 && process_address (i, check_only_p, &before, &after))
55a2c322 3929 {
d9cf932c
VM
3930 if (check_only_p)
3931 return true;
55a2c322
VM
3932 change_p = true;
3933 lra_update_dup (curr_id, i);
3934 }
cc8849a1 3935
55a2c322
VM
3936 if (change_p)
3937 /* If we've changed the instruction then any alternative that
3938 we chose previously may no longer be valid. */
7874b7c5 3939 lra_set_used_insn_alternative (curr_insn, LRA_UNKNOWN_ALT);
55a2c322 3940
d9cf932c 3941 if (! check_only_p && curr_insn_set != NULL_RTX
2c62cbaa
VM
3942 && check_and_process_move (&change_p, &sec_mem_p))
3943 return change_p;
3944
55a2c322
VM
3945 try_swapped:
3946
7874b7c5 3947 reused_alternative_num = check_only_p ? LRA_UNKNOWN_ALT : curr_id->used_insn_alternative;
55a2c322
VM
3948 if (lra_dump_file != NULL && reused_alternative_num >= 0)
3949 fprintf (lra_dump_file, "Reusing alternative %d for insn #%u\n",
3950 reused_alternative_num, INSN_UID (curr_insn));
3951
3952 if (process_alt_operands (reused_alternative_num))
3953 alt_p = true;
3954
d9cf932c
VM
3955 if (check_only_p)
3956 return ! alt_p || best_losers != 0;
3957
55a2c322
VM
3958 /* If insn is commutative (it's safe to exchange a certain pair of
3959 operands) then we need to try each alternative twice, the second
3960 time matching those two operands as if we had exchanged them. To
3961 do this, really exchange them in operands.
3962
3963 If we have just tried the alternatives the second time, return
3964 operands to normal and drop through. */
3965
3966 if (reused_alternative_num < 0 && commutative >= 0)
3967 {
3968 curr_swapped = !curr_swapped;
3969 if (curr_swapped)
3970 {
3971 swap_operands (commutative);
3972 goto try_swapped;
3973 }
3974 else
3975 swap_operands (commutative);
3976 }
3977
55a2c322
VM
3978 if (! alt_p && ! sec_mem_p)
3979 {
3980 /* No alternative works with reloads?? */
3981 if (INSN_CODE (curr_insn) >= 0)
3982 fatal_insn ("unable to generate reloads for:", curr_insn);
3983 error_for_asm (curr_insn,
3984 "inconsistent operand constraints in an %<asm%>");
11067dee 3985 lra_asm_error_p = true;
8b4aea73
VM
3986 /* Avoid further trouble with this insn. Don't generate use
3987 pattern here as we could use the insn SP offset. */
3988 lra_set_insn_deleted (curr_insn);
55a2c322
VM
3989 return true;
3990 }
3991
3992 /* If the best alternative is with operands 1 and 2 swapped, swap
3993 them. Update the operand numbers of any reloads already
3994 pushed. */
3995
3996 if (goal_alt_swapped)
3997 {
3998 if (lra_dump_file != NULL)
3999 fprintf (lra_dump_file, " Commutative operand exchange in insn %u\n",
4000 INSN_UID (curr_insn));
4001
4002 /* Swap the duplicates too. */
4003 swap_operands (commutative);
4004 change_p = true;
4005 }
4006
f15643d4 4007 /* Some targets' TARGET_SECONDARY_MEMORY_NEEDED (e.g. x86) are defined
55a2c322
VM
4008 too conservatively. So we use the secondary memory only if there
4009 is no any alternative without reloads. */
4010 use_sec_mem_p = false;
4011 if (! alt_p)
4012 use_sec_mem_p = true;
4013 else if (sec_mem_p)
4014 {
4015 for (i = 0; i < n_operands; i++)
4016 if (! goal_alt_win[i] && ! goal_alt_match_win[i])
4017 break;
4018 use_sec_mem_p = i < n_operands;
4019 }
4020
4021 if (use_sec_mem_p)
4022 {
e03dd765 4023 int in = -1, out = -1;
89d56d79 4024 rtx new_reg, src, dest, rld;
ef4bddc2 4025 machine_mode sec_mode, rld_mode;
55a2c322 4026
e03dd765
VM
4027 lra_assert (curr_insn_set != NULL_RTX && sec_mem_p);
4028 dest = SET_DEST (curr_insn_set);
4029 src = SET_SRC (curr_insn_set);
4030 for (i = 0; i < n_operands; i++)
4031 if (*curr_id->operand_loc[i] == dest)
4032 out = i;
4033 else if (*curr_id->operand_loc[i] == src)
4034 in = i;
4035 for (i = 0; i < curr_static_id->n_dups; i++)
4036 if (out < 0 && *curr_id->dup_loc[i] == dest)
4037 out = curr_static_id->dup_num[i];
4038 else if (in < 0 && *curr_id->dup_loc[i] == src)
4039 in = curr_static_id->dup_num[i];
4040 lra_assert (out >= 0 && in >= 0
4041 && curr_static_id->operand[out].type == OP_OUT
4042 && curr_static_id->operand[in].type == OP_IN);
bd4288c0 4043 rld = partial_subreg_p (GET_MODE (src), GET_MODE (dest)) ? src : dest;
66aa7879 4044 rld_mode = GET_MODE (rld);
94e23f53 4045 sec_mode = targetm.secondary_memory_needed_mode (rld_mode);
55a2c322
VM
4046 new_reg = lra_create_new_reg (sec_mode, NULL_RTX,
4047 NO_REGS, "secondary");
4048 /* If the mode is changed, it should be wider. */
bd4288c0 4049 lra_assert (!partial_subreg_p (sec_mode, rld_mode));
89d56d79
VM
4050 if (sec_mode != rld_mode)
4051 {
4052 /* If the target says specifically to use another mode for
67914693 4053 secondary memory moves we cannot reuse the original
89d56d79 4054 insn. */
1ccd4874 4055 after = emit_spill_move (false, new_reg, dest);
cfa434f6 4056 lra_process_new_insns (curr_insn, NULL, after,
1ccd4874
VM
4057 "Inserting the sec. move");
4058 /* We may have non null BEFORE here (e.g. after address
4059 processing. */
4060 push_to_sequence (before);
4061 before = emit_spill_move (true, new_reg, src);
4062 emit_insn (before);
4063 before = get_insns ();
4064 end_sequence ();
cfa434f6 4065 lra_process_new_insns (curr_insn, before, NULL, "Changing on");
1ccd4874
VM
4066 lra_set_insn_deleted (curr_insn);
4067 }
89d56d79 4068 else if (dest == rld)
1ccd4874 4069 {
e03dd765
VM
4070 *curr_id->operand_loc[out] = new_reg;
4071 lra_update_dup (curr_id, out);
66aa7879 4072 after = emit_spill_move (false, new_reg, dest);
cfa434f6 4073 lra_process_new_insns (curr_insn, NULL, after,
66aa7879
VM
4074 "Inserting the sec. move");
4075 }
4076 else
4077 {
e03dd765
VM
4078 *curr_id->operand_loc[in] = new_reg;
4079 lra_update_dup (curr_id, in);
1ccd4874
VM
4080 /* See comments above. */
4081 push_to_sequence (before);
66aa7879 4082 before = emit_spill_move (true, new_reg, src);
1ccd4874
VM
4083 emit_insn (before);
4084 before = get_insns ();
4085 end_sequence ();
cfa434f6 4086 lra_process_new_insns (curr_insn, before, NULL,
66aa7879
VM
4087 "Inserting the sec. move");
4088 }
4089 lra_update_insn_regno_info (curr_insn);
55a2c322
VM
4090 return true;
4091 }
55a2c322
VM
4092
4093 lra_assert (goal_alt_number >= 0);
4094 lra_set_used_insn_alternative (curr_insn, goal_alt_number);
4095
4096 if (lra_dump_file != NULL)
4097 {
4098 const char *p;
4099
4100 fprintf (lra_dump_file, " Choosing alt %d in insn %u:",
4101 goal_alt_number, INSN_UID (curr_insn));
4102 for (i = 0; i < n_operands; i++)
4103 {
4104 p = (curr_static_id->operand_alternative
4105 [goal_alt_number * n_operands + i].constraint);
4106 if (*p == '\0')
4107 continue;
4108 fprintf (lra_dump_file, " (%d) ", i);
4109 for (; *p != '\0' && *p != ',' && *p != '#'; p++)
4110 fputc (*p, lra_dump_file);
4111 }
36ff9dfb
VM
4112 if (INSN_CODE (curr_insn) >= 0
4113 && (p = get_insn_name (INSN_CODE (curr_insn))) != NULL)
4114 fprintf (lra_dump_file, " {%s}", p);
73ca989c
RS
4115 if (maybe_ne (curr_id->sp_offset, 0))
4116 {
4117 fprintf (lra_dump_file, " (sp_off=");
4118 print_dec (curr_id->sp_offset, lra_dump_file);
4119 fprintf (lra_dump_file, ")");
4120 }
4121 fprintf (lra_dump_file, "\n");
55a2c322
VM
4122 }
4123
4124 /* Right now, for any pair of operands I and J that are required to
4125 match, with J < I, goal_alt_matches[I] is J. Add I to
4126 goal_alt_matched[J]. */
f4eafc30 4127
55a2c322
VM
4128 for (i = 0; i < n_operands; i++)
4129 if ((j = goal_alt_matches[i]) >= 0)
4130 {
4131 for (k = 0; goal_alt_matched[j][k] >= 0; k++)
4132 ;
4133 /* We allow matching one output operand and several input
4134 operands. */
4135 lra_assert (k == 0
4136 || (curr_static_id->operand[j].type == OP_OUT
4137 && curr_static_id->operand[i].type == OP_IN
4138 && (curr_static_id->operand
4139 [goal_alt_matched[j][0]].type == OP_IN)));
4140 goal_alt_matched[j][k] = i;
4141 goal_alt_matched[j][k + 1] = -1;
4142 }
f4eafc30 4143
55a2c322
VM
4144 for (i = 0; i < n_operands; i++)
4145 goal_alt_win[i] |= goal_alt_match_win[i];
f4eafc30 4146
55a2c322
VM
4147 /* Any constants that aren't allowed and can't be reloaded into
4148 registers are here changed into memory references. */
4149 for (i = 0; i < n_operands; i++)
4150 if (goal_alt_win[i])
4151 {
4152 int regno;
4153 enum reg_class new_class;
4154 rtx reg = *curr_id->operand_loc[i];
4155
4156 if (GET_CODE (reg) == SUBREG)
4157 reg = SUBREG_REG (reg);
f4eafc30 4158
55a2c322
VM
4159 if (REG_P (reg) && (regno = REGNO (reg)) >= FIRST_PSEUDO_REGISTER)
4160 {
4161 bool ok_p = in_class_p (reg, goal_alt[i], &new_class);
4162
4163 if (new_class != NO_REGS && get_reg_class (regno) != new_class)
4164 {
4165 lra_assert (ok_p);
a2d0d374 4166 lra_change_class (regno, new_class, " Change to", true);
55a2c322
VM
4167 }
4168 }
4169 }
4170 else
4171 {
4172 const char *constraint;
4173 char c;
4174 rtx op = *curr_id->operand_loc[i];
4175 rtx subreg = NULL_RTX;
ef4bddc2 4176 machine_mode mode = curr_operand_mode[i];
f4eafc30 4177
55a2c322
VM
4178 if (GET_CODE (op) == SUBREG)
4179 {
4180 subreg = op;
4181 op = SUBREG_REG (op);
4182 mode = GET_MODE (op);
4183 }
f4eafc30 4184
55a2c322
VM
4185 if (CONST_POOL_OK_P (mode, op)
4186 && ((targetm.preferred_reload_class
4187 (op, (enum reg_class) goal_alt[i]) == NO_REGS)
4188 || no_input_reloads_p))
4189 {
4190 rtx tem = force_const_mem (mode, op);
f4eafc30 4191
55a2c322
VM
4192 change_p = true;
4193 if (subreg != NULL_RTX)
4194 tem = gen_rtx_SUBREG (mode, tem, SUBREG_BYTE (subreg));
f4eafc30 4195
55a2c322
VM
4196 *curr_id->operand_loc[i] = tem;
4197 lra_update_dup (curr_id, i);
d9cf932c 4198 process_address (i, false, &before, &after);
f4eafc30 4199
55a2c322
VM
4200 /* If the alternative accepts constant pool refs directly
4201 there will be no reload needed at all. */
4202 if (subreg != NULL_RTX)
4203 continue;
4204 /* Skip alternatives before the one requested. */
4205 constraint = (curr_static_id->operand_alternative
4206 [goal_alt_number * n_operands + i].constraint);
4207 for (;
4208 (c = *constraint) && c != ',' && c != '#';
4209 constraint += CONSTRAINT_LEN (c, constraint))
4210 {
777e635f 4211 enum constraint_num cn = lookup_constraint (constraint);
9eb1ca69
VM
4212 if ((insn_extra_memory_constraint (cn)
4213 || insn_extra_special_memory_constraint (cn))
777e635f 4214 && satisfies_memory_constraint_p (tem, cn))
55a2c322 4215 break;
55a2c322
VM
4216 }
4217 if (c == '\0' || c == ',' || c == '#')
4218 continue;
f4eafc30 4219
55a2c322
VM
4220 goal_alt_win[i] = true;
4221 }
4222 }
f4eafc30 4223
aefae0f1
TP
4224 n_outputs = 0;
4225 outputs[0] = -1;
55a2c322
VM
4226 for (i = 0; i < n_operands; i++)
4227 {
2b778c9d
VM
4228 int regno;
4229 bool optional_p = false;
55a2c322
VM
4230 rtx old, new_reg;
4231 rtx op = *curr_id->operand_loc[i];
4232
4233 if (goal_alt_win[i])
4234 {
4235 if (goal_alt[i] == NO_REGS
4236 && REG_P (op)
4237 /* When we assign NO_REGS it means that we will not
4238 assign a hard register to the scratch pseudo by
4239 assigment pass and the scratch pseudo will be
4240 spilled. Spilled scratch pseudos are transformed
4241 back to scratches at the LRA end. */
6c051d60
VM
4242 && lra_former_scratch_operand_p (curr_insn, i)
4243 && lra_former_scratch_p (REGNO (op)))
deca73f5
VM
4244 {
4245 int regno = REGNO (op);
a2d0d374 4246 lra_change_class (regno, NO_REGS, " Change to", true);
deca73f5
VM
4247 if (lra_get_regno_hard_regno (regno) >= 0)
4248 /* We don't have to mark all insn affected by the
4249 spilled pseudo as there is only one such insn, the
4250 current one. */
4251 reg_renumber[regno] = -1;
6c051d60
VM
4252 lra_assert (bitmap_single_bit_set_p
4253 (&lra_reg_info[REGNO (op)].insn_bitmap));
deca73f5 4254 }
2b778c9d
VM
4255 /* We can do an optional reload. If the pseudo got a hard
4256 reg, we might improve the code through inheritance. If
4257 it does not get a hard register we coalesce memory/memory
4258 moves later. Ignore move insns to avoid cycling. */
b0681c9e 4259 if (! lra_simple_p
2b778c9d
VM
4260 && lra_undo_inheritance_iter < LRA_MAX_INHERITANCE_PASSES
4261 && goal_alt[i] != NO_REGS && REG_P (op)
4262 && (regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER
a2d0d374 4263 && regno < new_regno_start
b0681c9e 4264 && ! lra_former_scratch_p (regno)
2b778c9d 4265 && reg_renumber[regno] < 0
3c954213
VM
4266 /* Check that the optional reload pseudo will be able to
4267 hold given mode value. */
4268 && ! (prohibited_class_reg_set_mode_p
4269 (goal_alt[i], reg_class_contents[goal_alt[i]],
4270 PSEUDO_REGNO_MODE (regno)))
2b778c9d 4271 && (curr_insn_set == NULL_RTX
b0681c9e
VM
4272 || !((REG_P (SET_SRC (curr_insn_set))
4273 || MEM_P (SET_SRC (curr_insn_set))
4274 || GET_CODE (SET_SRC (curr_insn_set)) == SUBREG)
4275 && (REG_P (SET_DEST (curr_insn_set))
4276 || MEM_P (SET_DEST (curr_insn_set))
4277 || GET_CODE (SET_DEST (curr_insn_set)) == SUBREG))))
2b778c9d 4278 optional_p = true;
c07a0a22
VM
4279 else if (goal_alt_matched[i][0] != -1
4280 && curr_static_id->operand[i].type == OP_OUT
4281 && (curr_static_id->operand_alternative
33163a62
VM
4282 [goal_alt_number * n_operands + i].earlyclobber)
4283 && REG_P (op))
c07a0a22 4284 {
33163a62
VM
4285 for (j = 0; goal_alt_matched[i][j] != -1; j++)
4286 {
4287 rtx op2 = *curr_id->operand_loc[goal_alt_matched[i][j]];
4288
4289 if (REG_P (op2) && REGNO (op) != REGNO (op2))
4290 break;
4291 }
4292 if (goal_alt_matched[i][j] != -1)
4293 {
4294 /* Generate reloads for different output and matched
4295 input registers. This is the easiest way to avoid
4296 creation of non-existing register conflicts in
4297 lra-lives.c. */
4298 match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
4299 &after, TRUE);
4300 outputs[n_outputs++] = i;
4301 outputs[n_outputs] = -1;
4302 }
c07a0a22
VM
4303 continue;
4304 }
2b778c9d
VM
4305 else
4306 continue;
55a2c322 4307 }
f4eafc30 4308
55a2c322
VM
4309 /* Operands that match previous ones have already been handled. */
4310 if (goal_alt_matches[i] >= 0)
4311 continue;
4312
4313 /* We should not have an operand with a non-offsettable address
4314 appearing where an offsettable address will do. It also may
4315 be a case when the address should be special in other words
4316 not a general one (e.g. it needs no index reg). */
4317 if (goal_alt_matched[i][0] == -1 && goal_alt_offmemok[i] && MEM_P (op))
4318 {
4319 enum reg_class rclass;
4320 rtx *loc = &XEXP (op, 0);
4321 enum rtx_code code = GET_CODE (*loc);
4322
4323 push_to_sequence (before);
4324 rclass = base_reg_class (GET_MODE (op), MEM_ADDR_SPACE (op),
4325 MEM, SCRATCH);
4326 if (GET_RTX_CLASS (code) == RTX_AUTOINC)
4327 new_reg = emit_inc (rclass, *loc, *loc,
4328 /* This value does not matter for MODIFY. */
4329 GET_MODE_SIZE (GET_MODE (op)));
95921002 4330 else if (get_reload_reg (OP_IN, Pmode, *loc, rclass, FALSE,
55a2c322 4331 "offsetable address", &new_reg))
634c3ff0
VM
4332 {
4333 rtx addr = *loc;
4334 enum rtx_code code = GET_CODE (addr);
4335
4336 if (code == AND && CONST_INT_P (XEXP (addr, 1)))
4337 /* (and ... (const_int -X)) is used to align to X bytes. */
4338 addr = XEXP (*loc, 0);
4339 lra_emit_move (new_reg, addr);
4340 if (addr != *loc)
4341 emit_move_insn (new_reg, gen_rtx_AND (GET_MODE (new_reg), new_reg, XEXP (*loc, 1)));
4342 }
55a2c322
VM
4343 before = get_insns ();
4344 end_sequence ();
4345 *loc = new_reg;
4346 lra_update_dup (curr_id, i);
4347 }
4348 else if (goal_alt_matched[i][0] == -1)
4349 {
ef4bddc2 4350 machine_mode mode;
55a2c322 4351 rtx reg, *loc;
91914e56 4352 int hard_regno;
55a2c322
VM
4353 enum op_type type = curr_static_id->operand[i].type;
4354
4355 loc = curr_id->operand_loc[i];
4356 mode = curr_operand_mode[i];
4357 if (GET_CODE (*loc) == SUBREG)
4358 {
4359 reg = SUBREG_REG (*loc);
91914e56 4360 poly_int64 byte = SUBREG_BYTE (*loc);
55a2c322 4361 if (REG_P (reg)
8e02e8a0
MF
4362 /* Strict_low_part requires reloading the register and not
4363 just the subreg. Likewise for a strict subreg no wider
4364 than a word for WORD_REGISTER_OPERATIONS targets. */
55a2c322 4365 && (curr_static_id->operand[i].strict_low
03a95621 4366 || (!paradoxical_subreg_p (mode, GET_MODE (reg))
55a2c322
VM
4367 && (hard_regno
4368 = get_try_hard_regno (REGNO (reg))) >= 0
4369 && (simplify_subreg_regno
4370 (hard_regno,
4371 GET_MODE (reg), byte, mode) < 0)
4372 && (goal_alt[i] == NO_REGS
4373 || (simplify_subreg_regno
4374 (ira_class_hard_regs[goal_alt[i]][0],
8e02e8a0 4375 GET_MODE (reg), byte, mode) >= 0)))
e5f83886 4376 || (partial_subreg_p (mode, GET_MODE (reg))
cf098191
RS
4377 && known_le (GET_MODE_SIZE (GET_MODE (reg)),
4378 UNITS_PER_WORD)
8e02e8a0 4379 && WORD_REGISTER_OPERATIONS)))
55a2c322 4380 {
62cdb862
MF
4381 /* An OP_INOUT is required when reloading a subreg of a
4382 mode wider than a word to ensure that data beyond the
4383 word being reloaded is preserved. Also automatically
4384 ensure that strict_low_part reloads are made into
4385 OP_INOUT which should already be true from the backend
4386 constraints. */
4387 if (type == OP_OUT
4388 && (curr_static_id->operand[i].strict_low
9eaf97d6 4389 || read_modify_subreg_p (*loc)))
8b8e23de 4390 type = OP_INOUT;
55a2c322
VM
4391 loc = &SUBREG_REG (*loc);
4392 mode = GET_MODE (*loc);
4393 }
4394 }
4395 old = *loc;
95921002
VM
4396 if (get_reload_reg (type, mode, old, goal_alt[i],
4397 loc != curr_id->operand_loc[i], "", &new_reg)
55a2c322
VM
4398 && type != OP_OUT)
4399 {
4400 push_to_sequence (before);
4401 lra_emit_move (new_reg, old);
4402 before = get_insns ();
4403 end_sequence ();
4404 }
4405 *loc = new_reg;
4406 if (type != OP_IN
4407 && find_reg_note (curr_insn, REG_UNUSED, old) == NULL_RTX)
4408 {
4409 start_sequence ();
4410 lra_emit_move (type == OP_INOUT ? copy_rtx (old) : old, new_reg);
4411 emit_insn (after);
4412 after = get_insns ();
4413 end_sequence ();
4414 *loc = new_reg;
4415 }
4416 for (j = 0; j < goal_alt_dont_inherit_ops_num; j++)
4417 if (goal_alt_dont_inherit_ops[j] == i)
4418 {
4419 lra_set_regno_unique_value (REGNO (new_reg));
4420 break;
4421 }
4422 lra_update_dup (curr_id, i);
4423 }
4424 else if (curr_static_id->operand[i].type == OP_IN
4425 && (curr_static_id->operand[goal_alt_matched[i][0]].type
57d69a63
VM
4426 == OP_OUT
4427 || (curr_static_id->operand[goal_alt_matched[i][0]].type
4428 == OP_INOUT
4429 && (operands_match_p
4430 (*curr_id->operand_loc[i],
4431 *curr_id->operand_loc[goal_alt_matched[i][0]],
4432 -1)))))
55a2c322 4433 {
511dcace
VM
4434 /* generate reloads for input and matched outputs. */
4435 match_inputs[0] = i;
4436 match_inputs[1] = -1;
aefae0f1 4437 match_reload (goal_alt_matched[i][0], match_inputs, outputs,
599e1cf8
VM
4438 goal_alt[i], &before, &after,
4439 curr_static_id->operand_alternative
4440 [goal_alt_number * n_operands + goal_alt_matched[i][0]]
4441 .earlyclobber);
55a2c322 4442 }
57d69a63
VM
4443 else if ((curr_static_id->operand[i].type == OP_OUT
4444 || (curr_static_id->operand[i].type == OP_INOUT
4445 && (operands_match_p
4446 (*curr_id->operand_loc[i],
4447 *curr_id->operand_loc[goal_alt_matched[i][0]],
4448 -1))))
55a2c322 4449 && (curr_static_id->operand[goal_alt_matched[i][0]].type
57d69a63 4450 == OP_IN))
511dcace 4451 /* Generate reloads for output and matched inputs. */
aefae0f1
TP
4452 match_reload (i, goal_alt_matched[i], outputs, goal_alt[i], &before,
4453 &after, curr_static_id->operand_alternative
4454 [goal_alt_number * n_operands + i].earlyclobber);
511dcace
VM
4455 else if (curr_static_id->operand[i].type == OP_IN
4456 && (curr_static_id->operand[goal_alt_matched[i][0]].type
4457 == OP_IN))
4458 {
4459 /* Generate reloads for matched inputs. */
4460 match_inputs[0] = i;
4461 for (j = 0; (k = goal_alt_matched[i][j]) >= 0; j++)
4462 match_inputs[j + 1] = k;
4463 match_inputs[j + 1] = -1;
aefae0f1
TP
4464 match_reload (-1, match_inputs, outputs, goal_alt[i], &before,
4465 &after, false);
511dcace 4466 }
55a2c322
VM
4467 else
4468 /* We must generate code in any case when function
4469 process_alt_operands decides that it is possible. */
4470 gcc_unreachable ();
aefae0f1
TP
4471
4472 /* Memorise processed outputs so that output remaining to be processed
4473 can avoid using the same register value (see match_reload). */
4474 if (curr_static_id->operand[i].type == OP_OUT)
4475 {
4476 outputs[n_outputs++] = i;
4477 outputs[n_outputs] = -1;
4478 }
4479
2b778c9d
VM
4480 if (optional_p)
4481 {
8a8330b7
VM
4482 rtx reg = op;
4483
4484 lra_assert (REG_P (reg));
4485 regno = REGNO (reg);
2b778c9d
VM
4486 op = *curr_id->operand_loc[i]; /* Substitution. */
4487 if (GET_CODE (op) == SUBREG)
4488 op = SUBREG_REG (op);
4489 gcc_assert (REG_P (op) && (int) REGNO (op) >= new_regno_start);
4490 bitmap_set_bit (&lra_optional_reload_pseudos, REGNO (op));
8a8330b7 4491 lra_reg_info[REGNO (op)].restore_rtx = reg;
2b778c9d
VM
4492 if (lra_dump_file != NULL)
4493 fprintf (lra_dump_file,
4494 " Making reload reg %d for reg %d optional\n",
4495 REGNO (op), regno);
4496 }
55a2c322
VM
4497 }
4498 if (before != NULL_RTX || after != NULL_RTX
4499 || max_regno_before != max_reg_num ())
4500 change_p = true;
4501 if (change_p)
4502 {
4503 lra_update_operator_dups (curr_id);
4504 /* Something changes -- process the insn. */
4505 lra_update_insn_regno_info (curr_insn);
4506 }
4507 lra_process_new_insns (curr_insn, before, after, "Inserting insn reload");
4508 return change_p;
4509}
4510
d9cf932c
VM
4511/* Return true if INSN satisfies all constraints. In other words, no
4512 reload insns are needed. */
4513bool
4514lra_constrain_insn (rtx_insn *insn)
4515{
4516 int saved_new_regno_start = new_regno_start;
4517 int saved_new_insn_uid_start = new_insn_uid_start;
4518 bool change_p;
4519
4520 curr_insn = insn;
4521 curr_id = lra_get_insn_recog_data (curr_insn);
4522 curr_static_id = curr_id->insn_static_data;
4523 new_insn_uid_start = get_max_uid ();
4524 new_regno_start = max_reg_num ();
4525 change_p = curr_insn_transform (true);
4526 new_regno_start = saved_new_regno_start;
4527 new_insn_uid_start = saved_new_insn_uid_start;
4528 return ! change_p;
4529}
4530
55a2c322
VM
4531/* Return true if X is in LIST. */
4532static bool
4533in_list_p (rtx x, rtx list)
4534{
4535 for (; list != NULL_RTX; list = XEXP (list, 1))
4536 if (XEXP (list, 0) == x)
4537 return true;
4538 return false;
4539}
4540
4541/* Return true if X contains an allocatable hard register (if
4542 HARD_REG_P) or a (spilled if SPILLED_P) pseudo. */
4543static bool
4544contains_reg_p (rtx x, bool hard_reg_p, bool spilled_p)
4545{
4546 int i, j;
4547 const char *fmt;
4548 enum rtx_code code;
4549
4550 code = GET_CODE (x);
4551 if (REG_P (x))
4552 {
4553 int regno = REGNO (x);
4554 HARD_REG_SET alloc_regs;
4555
4556 if (hard_reg_p)
4557 {
4558 if (regno >= FIRST_PSEUDO_REGISTER)
4559 regno = lra_get_regno_hard_regno (regno);
4560 if (regno < 0)
4561 return false;
50b3f54d 4562 alloc_regs = ~lra_no_alloc_regs;
55a2c322
VM
4563 return overlaps_hard_reg_set_p (alloc_regs, GET_MODE (x), regno);
4564 }
4565 else
4566 {
4567 if (regno < FIRST_PSEUDO_REGISTER)
4568 return false;
4569 if (! spilled_p)
4570 return true;
4571 return lra_get_regno_hard_regno (regno) < 0;
4572 }
4573 }
4574 fmt = GET_RTX_FORMAT (code);
4575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4576 {
4577 if (fmt[i] == 'e')
4578 {
4579 if (contains_reg_p (XEXP (x, i), hard_reg_p, spilled_p))
4580 return true;
4581 }
4582 else if (fmt[i] == 'E')
4583 {
4584 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4585 if (contains_reg_p (XVECEXP (x, i, j), hard_reg_p, spilled_p))
4586 return true;
4587 }
4588 }
4589 return false;
4590}
4591
28430b2e
VM
4592/* Process all regs in location *LOC and change them on equivalent
4593 substitution. Return true if any change was done. */
55a2c322 4594static bool
28430b2e 4595loc_equivalence_change_p (rtx *loc)
55a2c322
VM
4596{
4597 rtx subst, reg, x = *loc;
4598 bool result = false;
4599 enum rtx_code code = GET_CODE (x);
4600 const char *fmt;
4601 int i, j;
4602
4603 if (code == SUBREG)
4604 {
4605 reg = SUBREG_REG (x);
8d49e7ef 4606 if ((subst = get_equiv_with_elimination (reg, curr_insn)) != reg
55a2c322
VM
4607 && GET_MODE (subst) == VOIDmode)
4608 {
4609 /* We cannot reload debug location. Simplify subreg here
4610 while we know the inner mode. */
4611 *loc = simplify_gen_subreg (GET_MODE (x), subst,
4612 GET_MODE (reg), SUBREG_BYTE (x));
4613 return true;
4614 }
4615 }
8d49e7ef 4616 if (code == REG && (subst = get_equiv_with_elimination (x, curr_insn)) != x)
55a2c322
VM
4617 {
4618 *loc = subst;
4619 return true;
4620 }
4621
4622 /* Scan all the operand sub-expressions. */
4623 fmt = GET_RTX_FORMAT (code);
4624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4625 {
4626 if (fmt[i] == 'e')
28430b2e 4627 result = loc_equivalence_change_p (&XEXP (x, i)) || result;
55a2c322
VM
4628 else if (fmt[i] == 'E')
4629 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4630 result
28430b2e 4631 = loc_equivalence_change_p (&XVECEXP (x, i, j)) || result;
55a2c322
VM
4632 }
4633 return result;
4634}
4635
d0608e59 4636/* Similar to loc_equivalence_change_p, but for use as
4c2b2d79
VM
4637 simplify_replace_fn_rtx callback. DATA is insn for which the
4638 elimination is done. If it null we don't do the elimination. */
d0608e59 4639static rtx
4c2b2d79 4640loc_equivalence_callback (rtx loc, const_rtx, void *data)
d0608e59
JJ
4641{
4642 if (!REG_P (loc))
4643 return NULL_RTX;
4644
4c2b2d79 4645 rtx subst = (data == NULL
cfa434f6 4646 ? get_equiv (loc) : get_equiv_with_elimination (loc, (rtx_insn *) data));
d0608e59
JJ
4647 if (subst != loc)
4648 return subst;
4649
4650 return NULL_RTX;
4651}
4652
55a2c322
VM
4653/* Maximum number of generated reload insns per an insn. It is for
4654 preventing this pass cycling in a bug case. */
4655#define MAX_RELOAD_INSNS_NUMBER LRA_MAX_INSN_RELOADS
4656
4657/* The current iteration number of this LRA pass. */
4658int lra_constraint_iter;
4659
55a2c322
VM
4660/* True if we substituted equiv which needs checking register
4661 allocation correctness because the equivalent value contains
4662 allocatable hard registers or when we restore multi-register
4663 pseudo. */
4664bool lra_risky_transformations_p;
4665
4666/* Return true if REGNO is referenced in more than one block. */
4667static bool
4668multi_block_pseudo_p (int regno)
4669{
4670 basic_block bb = NULL;
4671 unsigned int uid;
4672 bitmap_iterator bi;
f4eafc30 4673
55a2c322
VM
4674 if (regno < FIRST_PSEUDO_REGISTER)
4675 return false;
f4eafc30 4676
55a2c322
VM
4677 EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
4678 if (bb == NULL)
4679 bb = BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn);
4680 else if (BLOCK_FOR_INSN (lra_insn_recog_data[uid]->insn) != bb)
4681 return true;
4682 return false;
4683}
4684
1966c91b
VM
4685/* Return true if LIST contains a deleted insn. */
4686static bool
0cc97fc5 4687contains_deleted_insn_p (rtx_insn_list *list)
1966c91b 4688{
0cc97fc5
DM
4689 for (; list != NULL_RTX; list = list->next ())
4690 if (NOTE_P (list->insn ())
4691 && NOTE_KIND (list->insn ()) == NOTE_INSN_DELETED)
1966c91b
VM
4692 return true;
4693 return false;
4694}
4695
55a2c322
VM
4696/* Return true if X contains a pseudo dying in INSN. */
4697static bool
605780f6 4698dead_pseudo_p (rtx x, rtx_insn *insn)
55a2c322
VM
4699{
4700 int i, j;
4701 const char *fmt;
4702 enum rtx_code code;
4703
4704 if (REG_P (x))
4705 return (insn != NULL_RTX
4706 && find_regno_note (insn, REG_DEAD, REGNO (x)) != NULL_RTX);
4707 code = GET_CODE (x);
4708 fmt = GET_RTX_FORMAT (code);
4709 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4710 {
4711 if (fmt[i] == 'e')
4712 {
4713 if (dead_pseudo_p (XEXP (x, i), insn))
4714 return true;
4715 }
4716 else if (fmt[i] == 'E')
4717 {
4718 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4719 if (dead_pseudo_p (XVECEXP (x, i, j), insn))
4720 return true;
4721 }
4722 }
4723 return false;
4724}
4725
/* Return true if INSN contains a dying pseudo in INSN right hand
   side.  */
static bool
insn_rhs_dead_pseudo_p (rtx_insn *insn)
{
  rtx set = single_set (insn);

  /* Callers pass only equivalence init insns, which are single
     sets.  */
  gcc_assert (set != NULL);
  return dead_pseudo_p (SET_SRC (set), insn);
}
4736
4737/* Return true if any init insn of REGNO contains a dying pseudo in
4738 insn right hand side. */
4739static bool
4740init_insn_rhs_dead_pseudo_p (int regno)
4741{
0cc97fc5 4742 rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
55a2c322
VM
4743
4744 if (insns == NULL)
4745 return false;
0cc97fc5
DM
4746 for (; insns != NULL_RTX; insns = insns->next ())
4747 if (insn_rhs_dead_pseudo_p (insns->insn ()))
55a2c322
VM
4748 return true;
4749 return false;
4750}
4751
01e54ef8
VM
4752/* Return TRUE if REGNO has a reverse equivalence. The equivalence is
4753 reverse only if we have one init insn with given REGNO as a
4754 source. */
4755static bool
4756reverse_equiv_p (int regno)
4757{
0cc97fc5
DM
4758 rtx_insn_list *insns = ira_reg_equiv[regno].init_insns;
4759 rtx set;
01e54ef8 4760
0cc97fc5 4761 if (insns == NULL)
01e54ef8 4762 return false;
0cc97fc5
DM
4763 if (! INSN_P (insns->insn ())
4764 || insns->next () != NULL)
01e54ef8 4765 return false;
0cc97fc5 4766 if ((set = single_set (insns->insn ())) == NULL_RTX)
01e54ef8
VM
4767 return false;
4768 return REG_P (SET_SRC (set)) && (int) REGNO (SET_SRC (set)) == regno;
4769}
4770
4771/* Return TRUE if REGNO was reloaded in an equivalence init insn. We
4772 call this function only for non-reverse equivalence. */
4773static bool
4774contains_reloaded_insn_p (int regno)
4775{
4776 rtx set;
0cc97fc5 4777 rtx_insn_list *list = ira_reg_equiv[regno].init_insns;
01e54ef8 4778
0cc97fc5
DM
4779 for (; list != NULL; list = list->next ())
4780 if ((set = single_set (list->insn ())) == NULL_RTX
01e54ef8
VM
4781 || ! REG_P (SET_DEST (set))
4782 || (int) REGNO (SET_DEST (set)) != regno)
4783 return true;
4784 return false;
4785}
4786
/* Entry function of LRA constraint pass.  Return true if the
   constraint pass did change the code.  FIRST_P is true on the first
   call for the current function.  */
bool
lra_constraints (bool first_p)
{
  bool changed_p;
  int i, hard_regno, new_insns_num;
  unsigned int min_len, new_min_len, uid;
  rtx set, x, reg, dest_reg;
  basic_block last_bb;
  bitmap_iterator bi;

  lra_constraint_iter++;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "\n********** Local #%d: **********\n\n",
	     lra_constraint_iter);
  changed_p = false;
  if (pic_offset_table_rtx
      && REGNO (pic_offset_table_rtx) >= FIRST_PSEUDO_REGISTER)
    lra_risky_transformations_p = true;
  else
    /* On the first iteration we should check IRA assignment
       correctness.  In rare cases, the assignments can be wrong as
       early clobbers operands are ignored in IRA or usages of
       paradoxical sub-registers are not taken into account by
       IRA.  */
    lra_risky_transformations_p = first_p;
  new_insn_uid_start = get_max_uid ();
  new_regno_start = first_p ? lra_constraint_new_regno_start : max_reg_num ();
  /* Mark used hard regs for target stack size calculations.  */
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0
	&& (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
      {
	int j, nregs;

	nregs = hard_regno_nregs (hard_regno, lra_reg_info[i].biggest_mode);
	for (j = 0; j < nregs; j++)
	  df_set_regs_ever_live (hard_regno + j, true);
      }
  /* Do elimination before the equivalence processing as we can spill
     some pseudos during elimination.  */
  lra_eliminate (false, first_p);
  /* Collect uids of all insns mentioning a pseudo with a live
     equivalence; those insns must be reprocessed below.  */
  auto_bitmap equiv_insn_bitmap (&reg_obstack);
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    if (lra_reg_info[i].nrefs != 0)
      {
	ira_reg_equiv[i].profitable_p = true;
	reg = regno_reg_rtx[i];
	if (lra_get_regno_hard_regno (i) < 0 && (x = get_equiv (reg)) != reg)
	  {
	    bool pseudo_p = contains_reg_p (x, false, false);

	    /* After RTL transformation, we cannot guarantee that
	       pseudo in the substitution was not reloaded which might
	       make equivalence invalid.  For example, in reverse
	       equiv of p0

	       p0 <- ...
	       ...
	       equiv_mem <- p0

	       the memory address register was reloaded before the 2nd
	       insn.  */
	    if ((! first_p && pseudo_p)
		/* We don't use DF for compilation speed sake.  So it
		   is problematic to update live info when we use an
		   equivalence containing pseudos in more than one
		   BB.  */
		|| (pseudo_p && multi_block_pseudo_p (i))
		/* If an init insn was deleted for some reason, cancel
		   the equiv.  We could update the equiv insns after
		   transformations including an equiv insn deletion
		   but it is not worthy as such cases are extremely
		   rare.  */
		|| contains_deleted_insn_p (ira_reg_equiv[i].init_insns)
		/* If it is not a reverse equivalence, we check that a
		   pseudo in rhs of the init insn is not dying in the
		   insn.  Otherwise, the live info at the beginning of
		   the corresponding BB might be wrong after we
		   removed the insn.  When the equiv can be a
		   constant, the right hand side of the init insn can
		   be a pseudo.  */
		|| (! reverse_equiv_p (i)
		    && (init_insn_rhs_dead_pseudo_p (i)
			/* If we reloaded the pseudo in an equivalence
			   init insn, we cannot remove the equiv init
			   insns and the init insns might write into
			   const memory in this case.  */
			|| contains_reloaded_insn_p (i)))
		/* Prevent access beyond equivalent memory for
		   paradoxical subregs.  */
		|| (MEM_P (x)
		    && maybe_gt (GET_MODE_SIZE (lra_reg_info[i].biggest_mode),
				 GET_MODE_SIZE (GET_MODE (x))))
		|| (pic_offset_table_rtx
		    && ((CONST_POOL_OK_P (PSEUDO_REGNO_MODE (i), x)
			 && (targetm.preferred_reload_class
			     (x, lra_get_allocno_class (i)) == NO_REGS))
			|| contains_symbol_ref_p (x))))
	      ira_reg_equiv[i].defined_p = false;
	    if (contains_reg_p (x, false, true))
	      ira_reg_equiv[i].profitable_p = false;
	    if (get_equiv (reg) != reg)
	      bitmap_ior_into (equiv_insn_bitmap, &lra_reg_info[i].insn_bitmap);
	  }
      }
  for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
    update_equiv (i);
  /* We should add all insns containing pseudos which should be
     substituted by their equivalences.  */
  EXECUTE_IF_SET_IN_BITMAP (equiv_insn_bitmap, 0, uid, bi)
    lra_push_insn_by_uid (uid);
  min_len = lra_insn_stack_length ();
  new_insns_num = 0;
  last_bb = NULL;
  changed_p = false;
  /* Main loop: pop and process insns until the stack drains.
     MIN_LEN/NEW_INSNS_NUM guard against generating reloads forever
     for one insn.  */
  while ((new_min_len = lra_insn_stack_length ()) != 0)
    {
      curr_insn = lra_pop_insn ();
      --new_min_len;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != last_bb)
	{
	  last_bb = curr_bb;
	  bb_reload_num = lra_curr_reload_num;
	}
      if (min_len > new_min_len)
	{
	  min_len = new_min_len;
	  new_insns_num = 0;
	}
      if (new_insns_num > MAX_RELOAD_INSNS_NUMBER)
	internal_error
	  ("maximum number of generated reload insns per insn achieved (%d)",
	   MAX_RELOAD_INSNS_NUMBER);
      new_insns_num++;
      if (DEBUG_INSN_P (curr_insn))
	{
	  /* We need to check equivalence in debug insn and change
	     pseudo to the equivalent value if necessary.  */
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn)))
	    {
	      rtx old = *curr_id->operand_loc[0];
	      *curr_id->operand_loc[0]
		= simplify_replace_fn_rtx (old, NULL_RTX,
					   loc_equivalence_callback, curr_insn);
	      if (old != *curr_id->operand_loc[0])
		{
		  lra_update_insn_regno_info (curr_insn);
		  changed_p = true;
		}
	    }
	}
      else if (INSN_P (curr_insn))
	{
	  if ((set = single_set (curr_insn)) != NULL_RTX)
	    {
	      dest_reg = SET_DEST (set);
	      /* The equivalence pseudo could be set up as SUBREG in a
		 case when it is a call restore insn in a mode
		 different from the pseudo mode.  */
	      if (GET_CODE (dest_reg) == SUBREG)
		dest_reg = SUBREG_REG (dest_reg);
	      if ((REG_P (dest_reg)
		   && (x = get_equiv (dest_reg)) != dest_reg
		   /* Remove insns which set up a pseudo whose value
		      cannot be changed.  Such insns might be not in
		      init_insns because we don't update equiv data
		      during insn transformations.

		      As an example, let suppose that a pseudo got
		      hard register and on the 1st pass was not
		      changed to equivalent constant.  We generate an
		      additional insn setting up the pseudo because of
		      secondary memory movement.  Then the pseudo is
		      spilled and we use the equiv constant.  In this
		      case we should remove the additional insn and
		      this insn is not init_insns list.  */
		   && (! MEM_P (x) || MEM_READONLY_P (x)
		       /* Check that this is actually an insn setting
			  up the equivalence.  */
		       || in_list_p (curr_insn,
				     ira_reg_equiv
				     [REGNO (dest_reg)].init_insns)))
		  || (((x = get_equiv (SET_SRC (set))) != SET_SRC (set))
		      && in_list_p (curr_insn,
				    ira_reg_equiv
				    [REGNO (SET_SRC (set))].init_insns)))
		{
		  /* This is equiv init insn of pseudo which did not get a
		     hard register -- remove the insn.  */
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file,
			       "      Removing equiv init insn %i (freq=%d)\n",
			       INSN_UID (curr_insn),
			       REG_FREQ_FROM_BB (BLOCK_FOR_INSN (curr_insn)));
		      dump_insn_slim (lra_dump_file, curr_insn);
		    }
		  if (contains_reg_p (x, true, false))
		    lra_risky_transformations_p = true;
		  lra_set_insn_deleted (curr_insn);
		  continue;
		}
	    }
	  curr_id = lra_get_insn_recog_data (curr_insn);
	  curr_static_id = curr_id->insn_static_data;
	  init_curr_insn_input_reloads ();
	  init_curr_operand_mode ();
	  if (curr_insn_transform (false))
	    changed_p = true;
	  /* Check non-transformed insns too for equiv change as USE
	     or CLOBBER don't need reloads but can contain pseudos
	     being changed on their equivalences.  */
	  else if (bitmap_bit_p (equiv_insn_bitmap, INSN_UID (curr_insn))
		   && loc_equivalence_change_p (&PATTERN (curr_insn)))
	    {
	      lra_update_insn_regno_info (curr_insn);
	      changed_p = true;
	    }
	}
    }

  /* If we used a new hard regno, changed_p should be true because the
     hard reg is assigned to a new pseudo.  */
  if (flag_checking && !changed_p)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
	if (lra_reg_info[i].nrefs != 0
	    && (hard_regno = lra_get_regno_hard_regno (i)) >= 0)
	  {
	    int j, nregs = hard_regno_nregs (hard_regno,
					     PSEUDO_REGNO_MODE (i));

	    for (j = 0; j < nregs; j++)
	      lra_assert (df_regs_ever_live_p (hard_regno + j));
	  }
    }
  return changed_p;
}
5029
8a8330b7
VM
5030static void initiate_invariants (void);
5031static void finish_invariants (void);
5032
55a2c322
VM
/* Initiate the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_init (void)
{
  /* Set up the inheritance-invariant table used by the
     inheritance/split transformations.  */
  initiate_invariants ();
}
5040
/* Finalize the LRA constraint pass.  It is done once per
   function.  */
void
lra_constraints_finish (void)
{
  /* Free the inheritance-invariant table, pool, and vector.  */
  finish_invariants ();
}
5048
5049\f
5050
/* Structure describes invariants for inheritance.  */
struct lra_invariant
{
  /* The order number of the invariant.  */
  int num;
  /* The invariant RTX.  */
  rtx invariant_rtx;
  /* The origin insn of the invariant.  */
  rtx_insn *insn;
};
5061
eb0f8780 5062typedef lra_invariant invariant_t;
8a8330b7
VM
5063typedef invariant_t *invariant_ptr_t;
5064typedef const invariant_t *const_invariant_ptr_t;
5065
5066/* Pointer to the inheritance invariants. */
5067static vec<invariant_ptr_t> invariants;
5068
5069/* Allocation pool for the invariants. */
eb0f8780 5070static object_allocator<lra_invariant> *invariants_pool;
8a8330b7
VM
5071
5072/* Hash table for the invariants. */
5073static htab_t invariant_table;
5074
5075/* Hash function for INVARIANT. */
5076static hashval_t
5077invariant_hash (const void *invariant)
5078{
5079 rtx inv = ((const_invariant_ptr_t) invariant)->invariant_rtx;
5080 return lra_rtx_hash (inv);
5081}
5082
5083/* Equal function for invariants INVARIANT1 and INVARIANT2. */
5084static int
5085invariant_eq_p (const void *invariant1, const void *invariant2)
5086{
5087 rtx inv1 = ((const_invariant_ptr_t) invariant1)->invariant_rtx;
5088 rtx inv2 = ((const_invariant_ptr_t) invariant2)->invariant_rtx;
5089
5090 return rtx_equal_p (inv1, inv2);
5091}
5092
5093/* Insert INVARIANT_RTX into the table if it is not there yet. Return
5094 invariant which is in the table. */
5095static invariant_ptr_t
5096insert_invariant (rtx invariant_rtx)
5097{
5098 void **entry_ptr;
5099 invariant_t invariant;
5100 invariant_ptr_t invariant_ptr;
5101
5102 invariant.invariant_rtx = invariant_rtx;
5103 entry_ptr = htab_find_slot (invariant_table, &invariant, INSERT);
5104 if (*entry_ptr == NULL)
5105 {
5106 invariant_ptr = invariants_pool->allocate ();
5107 invariant_ptr->invariant_rtx = invariant_rtx;
5108 invariant_ptr->insn = NULL;
5109 invariants.safe_push (invariant_ptr);
5110 *entry_ptr = (void *) invariant_ptr;
5111 }
5112 return (invariant_ptr_t) *entry_ptr;
5113}
5114
/* Initiate the invariant table.  */
static void
initiate_invariants (void)
{
  /* 100 is just an initial capacity hint; all containers grow on
     demand.  */
  invariants.create (100);
  invariants_pool
    = new object_allocator<lra_invariant> ("Inheritance invariants");
  invariant_table = htab_create (100, invariant_hash, invariant_eq_p, NULL);
}
5124
/* Finish the invariant table.  Frees the hash table, the allocation
   pool (and with it every invariant), and the pointer vector.  */
static void
finish_invariants (void)
{
  htab_delete (invariant_table);
  delete invariants_pool;
  invariants.release ();
}
5133
/* Make the invariant table empty.  Unlike finish_invariants, the
   containers themselves remain allocated for reuse.  */
static void
clear_invariants (void)
{
  htab_empty (invariant_table);
  invariants_pool->release ();
  invariants.truncate (0);
}
5142
5143\f
5144
5145/* This page contains code to do inheritance/split
5146 transformations. */
5147
5148/* Number of reloads passed so far in current EBB. */
5149static int reloads_num;
5150
5151/* Number of calls passed so far in current EBB. */
5152static int calls_num;
5153
5154/* Current reload pseudo check for validity of elements in
5155 USAGE_INSNS. */
5156static int curr_usage_insns_check;
5157
/* Info about last usage of registers in EBB to do inheritance/split
   transformation.  Inheritance transformation is done from a spilled
   pseudo and split transformations from a hard register or a pseudo
   assigned to a hard register.  */
struct usage_insns
{
  /* If the value is equal to CURR_USAGE_INSNS_CHECK, then the member
     value INSNS is valid.  The insns is chain of optional debug insns
     and a finishing non-debug insn using the corresponding reg.  The
     value is also used to mark the registers which are set up in the
     current insn.  The negated insn uid is used for this.  */
  int check;
  /* Value of global reloads_num at the last insn in INSNS.  */
  int reloads_num;
  /* Value of global calls_num at the last insn in INSNS.  */
  int calls_num;
  /* It can be true only for splitting.  And it means that the restore
     insn should be put after insn given by the following member.  */
  bool after_p;
  /* Next insns in the current EBB which use the original reg and the
     original reg value is not changed between the current insn and
     the next insns.  In other words, e.g. for inheritance, if we need
     to use the original reg value again in the next insns we can try
     to use the value in a hard register from a reload insn of the
     current insn.  */
  rtx insns;
};
5185
5186/* Map: regno -> corresponding pseudo usage insns. */
5187static struct usage_insns *usage_insns;
5188
5189static void
1476d1bd 5190setup_next_usage_insn (int regno, rtx insn, int reloads_num, bool after_p)
55a2c322
VM
5191{
5192 usage_insns[regno].check = curr_usage_insns_check;
5193 usage_insns[regno].insns = insn;
5194 usage_insns[regno].reloads_num = reloads_num;
5195 usage_insns[regno].calls_num = calls_num;
5196 usage_insns[regno].after_p = after_p;
5197}
5198
5199/* The function is used to form list REGNO usages which consists of
5200 optional debug insns finished by a non-debug insn using REGNO.
5201 RELOADS_NUM is current number of reload insns processed so far. */
5202static void
767dc529 5203add_next_usage_insn (int regno, rtx_insn *insn, int reloads_num)
55a2c322
VM
5204{
5205 rtx next_usage_insns;
f4eafc30 5206
55a2c322
VM
5207 if (usage_insns[regno].check == curr_usage_insns_check
5208 && (next_usage_insns = usage_insns[regno].insns) != NULL_RTX
5209 && DEBUG_INSN_P (insn))
5210 {
5211 /* Check that we did not add the debug insn yet. */
5212 if (next_usage_insns != insn
5213 && (GET_CODE (next_usage_insns) != INSN_LIST
5214 || XEXP (next_usage_insns, 0) != insn))
5215 usage_insns[regno].insns = gen_rtx_INSN_LIST (VOIDmode, insn,
5216 next_usage_insns);
5217 }
5218 else if (NONDEBUG_INSN_P (insn))
5219 setup_next_usage_insn (regno, insn, reloads_num, false);
5220 else
5221 usage_insns[regno].check = 0;
5222}
f4eafc30 5223
bc3591eb 5224/* Return first non-debug insn in list USAGE_INSNS. */
e8a54173 5225static rtx_insn *
bc3591eb
VM
5226skip_usage_debug_insns (rtx usage_insns)
5227{
5228 rtx insn;
5229
5230 /* Skip debug insns. */
5231 for (insn = usage_insns;
5232 insn != NULL_RTX && GET_CODE (insn) == INSN_LIST;
5233 insn = XEXP (insn, 1))
5234 ;
e8a54173 5235 return safe_as_a <rtx_insn *> (insn);
bc3591eb
VM
5236}
5237
5238/* Return true if we need secondary memory moves for insn in
5239 USAGE_INSNS after inserting inherited pseudo of class INHER_CL
5240 into the insn. */
5241static bool
fbebbadd
JR
5242check_secondary_memory_needed_p (enum reg_class inher_cl ATTRIBUTE_UNUSED,
5243 rtx usage_insns ATTRIBUTE_UNUSED)
bc3591eb 5244{
e8a54173
DM
5245 rtx_insn *insn;
5246 rtx set, dest;
bc3591eb
VM
5247 enum reg_class cl;
5248
5249 if (inher_cl == ALL_REGS
5250 || (insn = skip_usage_debug_insns (usage_insns)) == NULL_RTX)
5251 return false;
5252 lra_assert (INSN_P (insn));
5253 if ((set = single_set (insn)) == NULL_RTX || ! REG_P (SET_DEST (set)))
5254 return false;
5255 dest = SET_DEST (set);
5256 if (! REG_P (dest))
5257 return false;
5258 lra_assert (inher_cl != NO_REGS);
5259 cl = get_reg_class (REGNO (dest));
5260 return (cl != NO_REGS && cl != ALL_REGS
f15643d4 5261 && targetm.secondary_memory_needed (GET_MODE (dest), inher_cl, cl));
bc3591eb
VM
5262}
5263
55a2c322
VM
5264/* Registers involved in inheritance/split in the current EBB
5265 (inheritance/split pseudos and original registers). */
5266static bitmap_head check_only_regs;
5267
67914693 5268/* Reload pseudos cannot be involded in invariant inheritance in the
8a8330b7
VM
5269 current EBB. */
5270static bitmap_head invalid_invariant_regs;
5271
55a2c322
VM
/* Do inheritance transformations for insn INSN, which defines (if
   DEF_P) or uses ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which
   instruction in the EBB next uses ORIGINAL_REGNO; it has the same
   form as the "insns" field of usage_insns.  Return true if we
   succeed in such transformation.

   The transformations look like:

   p <- ...		  i <- ...
   ...			  p <- i    (new insn)
   ...		=>
   <- ... p ...		  <- ... i ...
   or
   ...			  i <- p    (new insn)
   <- ... p ...		  <- ... i ...
   ...		=>
   <- ... p ...		  <- ... i ...
   where p is a spilled original pseudo and i is a new inheritance pseudo.

   The inheritance pseudo has the smallest class of two classes CL and
   class of ORIGINAL REGNO.  */
static bool
inherit_reload_reg (bool def_p, int original_regno,
		    enum reg_class cl, rtx_insn *insn, rtx next_usage_insns)
{
  /* Inheritance adds moves which only pay off when optimizing for
     speed.  */
  if (optimize_function_for_size_p (cfun))
    return false;

  enum reg_class rclass = lra_get_allocno_class (original_regno);
  rtx original_reg = regno_reg_rtx[original_regno];
  rtx new_reg, usage_insn;
  rtx_insn *new_insns;

  lra_assert (! usage_insns[original_regno].after_p);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<\n");
  /* The inheritance pseudo must be usable both as CL and as the
     original's allocno class; disjoint classes make this impossible.  */
  if (! ira_reg_classes_intersect_p[cl][rclass])
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for %d "
		   "because of disjoint classes %s and %s\n",
		   original_regno, reg_class_names[cl],
		   reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  if ((ira_class_subset_p[cl][rclass] && cl != rclass)
      /* We don't use a subset of two classes because it can be
	 NO_REGS.  This transformation is still profitable in most
	 cases even if the classes are not intersected as register
	 move is probably cheaper than a memory load.  */
      || ira_class_hard_regs_num[cl] < ira_class_hard_regs_num[rclass])
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "    Use smallest class of %s and %s\n",
		 reg_class_names[cl], reg_class_names[rclass]);

      rclass = cl;
    }
  if (check_secondary_memory_needed_p (rclass, next_usage_insns))
    {
      /* Reject inheritance resulting in secondary memory moves.
	 Otherwise, there is a danger in LRA cycling.  Also such
	 transformation will be unprofitable.  */
      if (lra_dump_file != NULL)
	{
	  rtx_insn *insn = skip_usage_debug_insns (next_usage_insns);
	  rtx set = single_set (insn);

	  lra_assert (set != NULL_RTX);

	  rtx dest = SET_DEST (set);

	  lra_assert (REG_P (dest));
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance for insn %d(%s)<-%d(%s) "
		   "as secondary mem is needed\n",
		   REGNO (dest), reg_class_names[get_reg_class (REGNO (dest))],
		   original_regno, reg_class_names[rclass]);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Create the inheritance pseudo and the connecting move.  The move
     must be a single insn; otherwise the transformation is not
     profitable and is rejected below.  */
  new_reg = lra_create_new_reg (GET_MODE (original_reg), original_reg,
				rclass, "inheritance");
  start_sequence ();
  if (def_p)
    lra_emit_move (original_reg, new_reg);
  else
    lra_emit_move (new_reg, original_reg);
  new_insns = get_insns ();
  end_sequence ();
  if (NEXT_INSN (new_insns) != NULL_RTX)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting inheritance %d->%d "
		   "as it results in 2 or more insns:\n",
		   original_regno, REGNO (new_reg));
	  dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
	}
      return false;
    }
  /* Replace the original pseudo by the inheritance pseudo in INSN
     itself and record bookkeeping for the undo pass.  */
  lra_substitute_pseudo_within_insn (insn, original_regno, new_reg, false);
  lra_update_insn_regno_info (insn);
  if (! def_p)
    /* We now have a new usage insn for original regno.  */
    setup_next_usage_insn (original_regno, new_insns, reloads_num, false);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file, "    Original reg change %d->%d (bb%d):\n",
	     original_regno, REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
  lra_reg_info[REGNO (new_reg)].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
  bitmap_set_bit (&check_only_regs, original_regno);
  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
  if (def_p)
    lra_process_new_insns (insn, NULL, new_insns,
			   "Add original<-inheritance");
  else
    lra_process_new_insns (insn, new_insns, NULL,
			   "Add inheritance<-original");
  /* Walk the remaining usage chain: a non-INSN_LIST node is the final
     non-debug use; INSN_LIST nodes hold intervening debug insns.  */
  while (next_usage_insns != NULL_RTX)
    {
      if (GET_CODE (next_usage_insns) != INSN_LIST)
	{
	  usage_insn = next_usage_insns;
	  lra_assert (NONDEBUG_INSN_P (usage_insn));
	  next_usage_insns = NULL;
	}
      else
	{
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	}
      lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
			     DEBUG_INSN_P (usage_insn));
      lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
      if (lra_dump_file != NULL)
	{
	  basic_block bb = BLOCK_FOR_INSN (usage_insn);
	  fprintf (lra_dump_file,
		   "    Inheritance reuse change %d->%d (bb%d):\n",
		   original_regno, REGNO (new_reg),
		   bb ? bb->index : -1);
	  dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	}
    }
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
  return true;
}
5435
5436/* Return true if we need a caller save/restore for pseudo REGNO which
5437 was assigned to a hard register. */
5438static inline bool
5439need_for_call_save_p (int regno)
5440{
5441 lra_assert (regno >= FIRST_PSEUDO_REGISTER && reg_renumber[regno] >= 0);
5442 return (usage_insns[regno].calls_num < calls_num
5443 && (overlaps_hard_reg_set_p
1e288103 5444 ((flag_ipa_ra &&
10e1bdb2
TV
5445 ! hard_reg_set_empty_p (lra_reg_info[regno].actual_call_used_reg_set))
5446 ? lra_reg_info[regno].actual_call_used_reg_set
5447 : call_used_reg_set,
8a26ad39 5448 PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
80ec73f4 5449 || (targetm.hard_regno_call_part_clobbered
473574ee
SE
5450 (lra_reg_info[regno].call_insn,
5451 reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
55a2c322
VM
5452}
5453
1aa95df7 5454/* Global registers occurring in the current EBB. */
55a2c322
VM
5455static bitmap_head ebb_global_regs;
5456
5457/* Return true if we need a split for hard register REGNO or pseudo
5458 REGNO which was assigned to a hard register.
5459 POTENTIAL_RELOAD_HARD_REGS contains hard registers which might be
5460 used for reloads since the EBB end. It is an approximation of the
5461 used hard registers in the split range. The exact value would
5462 require expensive calculations. If we were aggressive with
5463 splitting because of the approximation, the split pseudo will save
5464 the same hard register assignment and will be removed in the undo
5465 pass. We still need the approximation because too aggressive
5466 splitting would result in too inaccurate cost calculation in the
5467 assignment pass because of too many generated moves which will be
5468 probably removed in the undo pass. */
5469static inline bool
5470need_for_split_p (HARD_REG_SET potential_reload_hard_regs, int regno)
5471{
5472 int hard_regno = regno < FIRST_PSEUDO_REGISTER ? regno : reg_renumber[regno];
5473
5474 lra_assert (hard_regno >= 0);
5475 return ((TEST_HARD_REG_BIT (potential_reload_hard_regs, hard_regno)
5476 /* Don't split eliminable hard registers, otherwise we can
5477 split hard registers like hard frame pointer, which
5478 lives on BB start/end according to DF-infrastructure,
5479 when there is a pseudo assigned to the register and
5480 living in the same BB. */
5481 && (regno >= FIRST_PSEUDO_REGISTER
5482 || ! TEST_HARD_REG_BIT (eliminable_regset, hard_regno))
5483 && ! TEST_HARD_REG_BIT (lra_no_alloc_regs, hard_regno)
e32e4c4a
VM
5484 /* Don't split call clobbered hard regs living through
5485 calls, otherwise we might have a check problem in the
5486 assign sub-pass as in the most cases (exception is a
5487 situation when lra_risky_transformations_p value is
5488 true) the assign pass assumes that all pseudos living
5489 through calls are assigned to call saved hard regs. */
5490 && (regno >= FIRST_PSEUDO_REGISTER
5491 || ! TEST_HARD_REG_BIT (call_used_reg_set, regno)
5492 || usage_insns[regno].calls_num == calls_num)
55a2c322
VM
5493 /* We need at least 2 reloads to make pseudo splitting
5494 profitable. We should provide hard regno splitting in
5495 any case to solve 1st insn scheduling problem when
5496 moving hard register definition up might result in
5497 impossibility to find hard register for reload pseudo of
5498 small register class. */
5499 && (usage_insns[regno].reloads_num
8e9d68a9 5500 + (regno < FIRST_PSEUDO_REGISTER ? 0 : 3) < reloads_num)
55a2c322
VM
5501 && (regno < FIRST_PSEUDO_REGISTER
5502 /* For short living pseudos, spilling + inheritance can
5503 be considered a substitution for splitting.
5504 Therefore we do not splitting for local pseudos. It
5505 decreases also aggressiveness of splitting. The
5506 minimal number of references is chosen taking into
5507 account that for 2 references splitting has no sense
5508 as we can just spill the pseudo. */
5509 || (regno >= FIRST_PSEUDO_REGISTER
5510 && lra_reg_info[regno].nrefs > 3
5511 && bitmap_bit_p (&ebb_global_regs, regno))))
5512 || (regno >= FIRST_PSEUDO_REGISTER && need_for_call_save_p (regno)));
5513}
5514
5515/* Return class for the split pseudo created from original pseudo with
5516 ALLOCNO_CLASS and MODE which got a hard register HARD_REGNO. We
5517 choose subclass of ALLOCNO_CLASS which contains HARD_REGNO and
5518 results in no secondary memory movements. */
5519static enum reg_class
5520choose_split_class (enum reg_class allocno_class,
5521 int hard_regno ATTRIBUTE_UNUSED,
ef4bddc2 5522 machine_mode mode ATTRIBUTE_UNUSED)
55a2c322 5523{
55a2c322
VM
5524 int i;
5525 enum reg_class cl, best_cl = NO_REGS;
ef4dbe49
JR
5526 enum reg_class hard_reg_class ATTRIBUTE_UNUSED
5527 = REGNO_REG_CLASS (hard_regno);
f4eafc30 5528
f15643d4 5529 if (! targetm.secondary_memory_needed (mode, allocno_class, allocno_class)
55a2c322
VM
5530 && TEST_HARD_REG_BIT (reg_class_contents[allocno_class], hard_regno))
5531 return allocno_class;
5532 for (i = 0;
5533 (cl = reg_class_subclasses[allocno_class][i]) != LIM_REG_CLASSES;
5534 i++)
f15643d4
RS
5535 if (! targetm.secondary_memory_needed (mode, cl, hard_reg_class)
5536 && ! targetm.secondary_memory_needed (mode, hard_reg_class, cl)
55a2c322
VM
5537 && TEST_HARD_REG_BIT (reg_class_contents[cl], hard_regno)
5538 && (best_cl == NO_REGS
5539 || ira_class_hard_regs_num[best_cl] < ira_class_hard_regs_num[cl]))
5540 best_cl = cl;
5541 return best_cl;
55a2c322
VM
5542}
5543
8ffa3150
RS
5544/* Copy any equivalence information from ORIGINAL_REGNO to NEW_REGNO.
5545 It only makes sense to call this function if NEW_REGNO is always
5546 equal to ORIGINAL_REGNO. */
5547
5548static void
5549lra_copy_reg_equiv (unsigned int new_regno, unsigned int original_regno)
5550{
5551 if (!ira_reg_equiv[original_regno].defined_p)
5552 return;
5553
5554 ira_expand_reg_equiv ();
5555 ira_reg_equiv[new_regno].defined_p = true;
5556 if (ira_reg_equiv[original_regno].memory)
5557 ira_reg_equiv[new_regno].memory
5558 = copy_rtx (ira_reg_equiv[original_regno].memory);
5559 if (ira_reg_equiv[original_regno].constant)
5560 ira_reg_equiv[new_regno].constant
5561 = copy_rtx (ira_reg_equiv[original_regno].constant);
5562 if (ira_reg_equiv[original_regno].invariant)
5563 ira_reg_equiv[new_regno].invariant
5564 = copy_rtx (ira_reg_equiv[original_regno].invariant);
5565}
5566
55a2c322
VM
/* Do split transformations for insn INSN, which defines or uses
   ORIGINAL_REGNO.  NEXT_USAGE_INSNS specifies which instruction in
   the EBB next uses ORIGINAL_REGNO; it has the same form as the
   "insns" field of usage_insns.  If TO is not NULL, we don't use
   usage_insns, we put restore insns after TO insn.  It is a case when
   we call it from lra_split_hard_reg_for, outside the inheritance
   pass.

   The transformations look like:

   p <- ...		  p <- ...
   ...			  s <- p    (new insn -- save)
   ...		=>
   ...			  p <- s    (new insn -- restore)
   <- ... p ...		  <- ... p ...
   or
   <- ... p ...		  <- ... p ...
   ...			  s <- p    (new insn -- save)
   ...		=>
   ...			  p <- s    (new insn -- restore)
   <- ... p ...		  <- ... p ...

   where p is an original pseudo got a hard register or a hard
   register and s is a new split pseudo.  The save is put before INSN
   if BEFORE_P is true.  Return true if we succeed in such
   transformation.  */
static bool
split_reg (bool before_p, int original_regno, rtx_insn *insn,
	   rtx next_usage_insns, rtx_insn *to)
{
  enum reg_class rclass;
  rtx original_reg;
  int hard_regno, nregs;
  rtx new_reg, usage_insn;
  rtx_insn *restore, *save;
  bool after_p;
  bool call_save_p;
  machine_mode mode;

  if (original_regno < FIRST_PSEUDO_REGISTER)
    {
      /* Splitting a hard register itself.  */
      rclass = ira_allocno_class_translate[REGNO_REG_CLASS (original_regno)];
      hard_regno = original_regno;
      call_save_p = false;
      nregs = 1;
      mode = lra_reg_info[hard_regno].biggest_mode;
      machine_mode reg_rtx_mode = GET_MODE (regno_reg_rtx[hard_regno]);
      /* A reg can have a biggest_mode of VOIDmode if it was only ever seen
	 as part of a multi-word register.  In that case, or if the biggest
	 mode was larger than a register, just use the reg_rtx.  Otherwise,
	 limit the size to that of the biggest access in the function.  */
      if (mode == VOIDmode
	  || paradoxical_subreg_p (mode, reg_rtx_mode))
	{
	  original_reg = regno_reg_rtx[hard_regno];
	  mode = reg_rtx_mode;
	}
      else
	original_reg = gen_rtx_REG (mode, hard_regno);
    }
  else
    {
      /* Splitting a pseudo that got a hard register.  */
      mode = PSEUDO_REGNO_MODE (original_regno);
      hard_regno = reg_renumber[original_regno];
      nregs = hard_regno_nregs (hard_regno, mode);
      rclass = lra_get_allocno_class (original_regno);
      original_reg = regno_reg_rtx[original_regno];
      call_save_p = need_for_call_save_p (original_regno);
    }
  lra_assert (hard_regno >= 0);
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    ((((((((((((((((((((((((((((((((((((((((((((((((\n");

  if (call_save_p)
    {
      /* For a caller save, the split pseudo gets NO_REGS so that it is
	 spilled to memory (or a caller-saved location) across calls.  */
      mode = HARD_REGNO_CALLER_SAVE_MODE (hard_regno,
					  hard_regno_nregs (hard_regno, mode),
					  mode);
      new_reg = lra_create_new_reg (mode, NULL_RTX, NO_REGS, "save");
    }
  else
    {
      rclass = choose_split_class (rclass, hard_regno, mode);
      if (rclass == NO_REGS)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): "
		       "no good reg class for %d(%s)\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       hard_regno,
		       reg_class_names[REGNO_REG_CLASS (hard_regno)]);
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      /* Split_if_necessary can split hard registers used as part of a
	 multi-register mode but splits each register individually.  The
	 mode used for each independent register may not be supported
	 so reject the split.  Splitting the wider mode should theoretically
	 be possible but is not implemented.  */
      if (!targetm.hard_regno_mode_ok (hard_regno, mode))
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Rejecting split of %d(%s): unsuitable mode %s\n",
		       original_regno,
		       reg_class_names[lra_get_allocno_class (original_regno)],
		       GET_MODE_NAME (mode));
	      fprintf
		(lra_dump_file,
		 "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	    }
	  return false;
	}
      new_reg = lra_create_new_reg (mode, original_reg, rclass, "split");
      /* The split pseudo keeps the original hard register assignment.  */
      reg_renumber[REGNO (new_reg)] = hard_regno;
    }
  int new_regno = REGNO (new_reg);
  /* Both the save and the restore must each be a single insn,
     otherwise the split is not profitable (except for call saves,
     which may legitimately expand to more insns).  */
  save = emit_spill_move (true, new_reg, original_reg);
  if (NEXT_INSN (save) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf
	    (lra_dump_file,
	     "    Rejecting split %d->%d resulting in > 2 save insns:\n",
	     original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  restore = emit_spill_move (false, new_reg, original_reg);
  if (NEXT_INSN (restore) != NULL_RTX && !call_save_p)
    {
      if (lra_dump_file != NULL)
	{
	  fprintf (lra_dump_file,
		   "    Rejecting split %d->%d "
		   "resulting in > 2 restore insns:\n",
		   original_regno, new_regno);
	  dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
	  fprintf (lra_dump_file,
		   "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
	}
      return false;
    }
  /* Transfer equivalence information to the spill register, so that
     if we fail to allocate the spill register, we have the option of
     rematerializing the original value instead of spilling to the stack.  */
  if (!HARD_REGISTER_NUM_P (original_regno)
      && mode == PSEUDO_REGNO_MODE (original_regno))
    lra_copy_reg_equiv (new_regno, original_regno);
  lra_reg_info[new_regno].restore_rtx = regno_reg_rtx[original_regno];
  bitmap_set_bit (&lra_split_regs, new_regno);
  if (to != NULL)
    {
      /* Called from lra_split_hard_reg_for: put the restore after TO.  */
      lra_assert (next_usage_insns == NULL);
      usage_insn = to;
      after_p = TRUE;
    }
  else
    {
      /* We need check_only_regs only inside the inheritance pass.  */
      bitmap_set_bit (&check_only_regs, new_regno);
      bitmap_set_bit (&check_only_regs, original_regno);
      after_p = usage_insns[original_regno].after_p;
      /* Walk the usage chain: INSN_LIST nodes are debug insns to fix
	 up; the final non-list node is where the restore goes.  */
      for (;;)
	{
	  if (GET_CODE (next_usage_insns) != INSN_LIST)
	    {
	      usage_insn = next_usage_insns;
	      break;
	    }
	  usage_insn = XEXP (next_usage_insns, 0);
	  lra_assert (DEBUG_INSN_P (usage_insn));
	  next_usage_insns = XEXP (next_usage_insns, 1);
	  lra_substitute_pseudo (&usage_insn, original_regno, new_reg, false,
				 true);
	  lra_update_insn_regno_info (as_a <rtx_insn *> (usage_insn));
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "    Split reuse change %d->%d:\n",
		       original_regno, new_regno);
	      dump_insn_slim (lra_dump_file, as_a <rtx_insn *> (usage_insn));
	    }
	}
    }
  lra_assert (NOTE_P (usage_insn) || NONDEBUG_INSN_P (usage_insn));
  lra_assert (usage_insn != insn || (after_p && before_p));
  lra_process_new_insns (as_a <rtx_insn *> (usage_insn),
			 after_p ? NULL : restore,
			 after_p ? restore : NULL,
			 call_save_p
			 ?  "Add reg<-save" : "Add reg<-split");
  lra_process_new_insns (insn, before_p ? save : NULL,
			 before_p ? NULL : save,
			 call_save_p
			 ?  "Add save<-reg" : "Add split<-reg");
  if (nregs > 1)
    /* If we are trying to split multi-register.  We should check
       conflicts on the next assignment sub-pass.  IRA can allocate on
       sub-register levels, LRA do this on pseudos level right now and
       this discrepancy may create allocation conflicts after
       splitting.  */
    lra_risky_transformations_p = true;
  if (lra_dump_file != NULL)
    fprintf (lra_dump_file,
	     "    ))))))))))))))))))))))))))))))))))))))))))))))))\n");
  return true;
}
5786
6027ea4c
VM
/* Split a hard reg for reload pseudo REGNO having RCLASS and living
   in the range [FROM, TO].  Return true if did a split.  Otherwise,
   return false.  */
bool
spill_hard_reg_in_range (int regno, enum reg_class rclass, rtx_insn *from, rtx_insn *to)
{
  int i, hard_regno;
  int rclass_size;
  rtx_insn *insn;
  unsigned int uid;
  bitmap_iterator bi;
  HARD_REG_SET ignore;

  lra_assert (from != NULL && to != NULL);
  /* Collect hard registers mentioned (explicitly or as operands) by
     any insn referring to REGNO; we must not pick one of those.  */
  CLEAR_HARD_REG_SET (ignore);
  EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi)
    {
      lra_insn_recog_data_t id = lra_insn_recog_data[uid];
      struct lra_static_insn_data *static_id = id->insn_static_data;
      struct lra_insn_reg *reg;

      for (reg = id->regs; reg != NULL; reg = reg->next)
	if (reg->regno < FIRST_PSEUDO_REGISTER)
	  SET_HARD_REG_BIT (ignore, reg->regno);
      for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	SET_HARD_REG_BIT (ignore, reg->regno);
    }
  /* Try each hard register of RCLASS that conflicts with REGNO (so
     splitting it actually frees something REGNO could use) and is not
     in the ignore set.  */
  rclass_size = ira_class_hard_regs_num[rclass];
  for (i = 0; i < rclass_size; i++)
    {
      hard_regno = ira_class_hard_regs[rclass][i];
      if (! TEST_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hard_regno)
	  || TEST_HARD_REG_BIT (ignore, hard_regno))
	continue;
      /* Scan [FROM, TO]; bail out (break) on any insn that references
	 HARD_REGNO, since then the register is not free over the whole
	 range.  */
      for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
	{
	  struct lra_static_insn_data *static_id;
	  struct lra_insn_reg *reg;

	  if (!INSN_P (insn))
	    continue;
	  if (bitmap_bit_p (&lra_reg_info[hard_regno].insn_bitmap,
			    INSN_UID (insn)))
	    break;
	  static_id = lra_get_insn_recog_data (insn)->insn_static_data;
	  for (reg = static_id->hard_regs; reg != NULL; reg = reg->next)
	    if (reg->regno == hard_regno)
	      break;
	  if (reg != NULL)
	    break;
	}
      /* The scan stopped early -> HARD_REGNO is used inside the range.  */
      if (insn != NEXT_INSN (to))
	continue;
      if (split_reg (TRUE, hard_regno, from, NULL, to))
	return true;
    }
  return false;
}
5845
55a2c322
VM
5846/* Recognize that we need a split transformation for insn INSN, which
5847 defines or uses REGNO in its insn biggest MODE (we use it only if
5848 REGNO is a hard register). POTENTIAL_RELOAD_HARD_REGS contains
5849 hard registers which might be used for reloads since the EBB end.
5850 Put the save before INSN if BEFORE_P is true. MAX_UID is maximla
5851 uid before starting INSN processing. Return true if we succeed in
5852 such transformation. */
5853static bool
ef4bddc2 5854split_if_necessary (int regno, machine_mode mode,
55a2c322 5855 HARD_REG_SET potential_reload_hard_regs,
cfa434f6 5856 bool before_p, rtx_insn *insn, int max_uid)
55a2c322
VM
5857{
5858 bool res = false;
5859 int i, nregs = 1;
5860 rtx next_usage_insns;
5861
5862 if (regno < FIRST_PSEUDO_REGISTER)
ad474626 5863 nregs = hard_regno_nregs (regno, mode);
55a2c322
VM
5864 for (i = 0; i < nregs; i++)
5865 if (usage_insns[regno + i].check == curr_usage_insns_check
5866 && (next_usage_insns = usage_insns[regno + i].insns) != NULL_RTX
5867 /* To avoid processing the register twice or more. */
5868 && ((GET_CODE (next_usage_insns) != INSN_LIST
5869 && INSN_UID (next_usage_insns) < max_uid)
5870 || (GET_CODE (next_usage_insns) == INSN_LIST
5871 && (INSN_UID (XEXP (next_usage_insns, 0)) < max_uid)))
5872 && need_for_split_p (potential_reload_hard_regs, regno + i)
6027ea4c 5873 && split_reg (before_p, regno + i, insn, next_usage_insns, NULL))
55a2c322
VM
5874 res = true;
5875 return res;
5876}
5877
8a8330b7
VM
5878/* Return TRUE if rtx X is considered as an invariant for
5879 inheritance. */
5880static bool
5881invariant_p (const_rtx x)
5882{
5883 machine_mode mode;
5884 const char *fmt;
5885 enum rtx_code code;
5886 int i, j;
5887
850b8aa3
SB
5888 if (side_effects_p (x))
5889 return false;
5890
8a8330b7
VM
5891 code = GET_CODE (x);
5892 mode = GET_MODE (x);
5893 if (code == SUBREG)
5894 {
5895 x = SUBREG_REG (x);
5896 code = GET_CODE (x);
bd5a2c67 5897 mode = wider_subreg_mode (mode, GET_MODE (x));
8a8330b7
VM
5898 }
5899
5900 if (MEM_P (x))
5901 return false;
5902
5903 if (REG_P (x))
5904 {
5905 int i, nregs, regno = REGNO (x);
5906
5907 if (regno >= FIRST_PSEUDO_REGISTER || regno == STACK_POINTER_REGNUM
5908 || TEST_HARD_REG_BIT (eliminable_regset, regno)
5909 || GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
5910 return false;
ad474626 5911 nregs = hard_regno_nregs (regno, mode);
8a8330b7
VM
5912 for (i = 0; i < nregs; i++)
5913 if (! fixed_regs[regno + i]
5914 /* A hard register may be clobbered in the current insn
5915 but we can ignore this case because if the hard
5916 register is used it should be set somewhere after the
5917 clobber. */
5918 || bitmap_bit_p (&invalid_invariant_regs, regno + i))
5919 return false;
5920 }
5921 fmt = GET_RTX_FORMAT (code);
5922 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5923 {
5924 if (fmt[i] == 'e')
5925 {
5926 if (! invariant_p (XEXP (x, i)))
5927 return false;
5928 }
5929 else if (fmt[i] == 'E')
5930 {
5931 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5932 if (! invariant_p (XVECEXP (x, i, j)))
5933 return false;
5934 }
5935 }
5936 return true;
5937}
5938
/* We have 'dest_reg <- invariant'.  Let us try to make an invariant
   inheritance transformation (using dest_reg instead invariant in a
   subsequent insn).  */
static bool
process_invariant_for_inheritance (rtx dst_reg, rtx invariant_rtx)
{
  invariant_ptr_t invariant_ptr;
  rtx_insn *insn, *new_insns;
  rtx insn_set, insn_reg, new_reg;
  int insn_regno;
  bool succ_p = false;
  int dst_regno = REGNO (dst_reg);
  machine_mode dst_mode = GET_MODE (dst_reg);
  enum reg_class cl = lra_get_allocno_class (dst_regno), insn_reg_cl;

  /* Look up (or enter) the invariant in the invariant table; a
     non-null recorded insn means a later insn computes the same
     invariant.  */
  invariant_ptr = insert_invariant (invariant_rtx);
  if ((insn = invariant_ptr->insn) != NULL_RTX)
    {
      /* We have a subsequent insn using the invariant.  */
      insn_set = single_set (insn);
      lra_assert (insn_set != NULL);
      insn_reg = SET_DEST (insn_set);
      lra_assert (REG_P (insn_reg));
      insn_regno = REGNO (insn_reg);
      insn_reg_cl = lra_get_allocno_class (insn_regno);

      if (dst_mode == GET_MODE (insn_reg)
	  /* We should consider only result move reg insns which are
	     cheap.  */
	  && targetm.register_move_cost (dst_mode, cl, insn_reg_cl) == 2
	  && targetm.register_move_cost (dst_mode, cl, cl) == 2)
	{
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file,
		     "    [[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[\n");
	  /* Create an inheritance pseudo holding the invariant value;
	     restore_rtx remembers the original computation so the undo
	     pass can revert this.  */
	  new_reg = lra_create_new_reg (dst_mode, dst_reg,
					cl, "invariant inheritance");
	  bitmap_set_bit (&lra_inheritance_pseudos, REGNO (new_reg));
	  bitmap_set_bit (&check_only_regs, REGNO (new_reg));
	  lra_reg_info[REGNO (new_reg)].restore_rtx = PATTERN (insn);
	  /* Copy the value into the inheritance pseudo right after the
	     current insn ...  */
	  start_sequence ();
	  lra_emit_move (new_reg, dst_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (curr_insn, NULL, new_insns,
				 "Add invariant inheritance<-original");
	  /* ... and replace the later recomputation of the invariant by
	     a move from that pseudo, deleting the original insn.  */
	  start_sequence ();
	  lra_emit_move (SET_DEST (insn_set), new_reg);
	  new_insns = get_insns ();
	  end_sequence ();
	  lra_process_new_insns (insn, NULL, new_insns,
				 "Changing reload<-inheritance");
	  lra_set_insn_deleted (insn);
	  succ_p = true;
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "    Invariant inheritance reuse change %d (bb%d):\n",
		       REGNO (new_reg), BLOCK_FOR_INSN (insn)->index);
	      dump_insn_slim (lra_dump_file, insn);
	      fprintf (lra_dump_file,
		       "    ]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]\n");
	    }
	}
    }
  /* Record the current insn as the latest producer of this invariant.  */
  invariant_ptr->insn = curr_insn;
  return succ_p;
}
6007
55a2c322
VM
/* Check only registers living at the current program point in the
   current EBB.  */
static bitmap_head live_regs;

/* Update live info in EBB given by its HEAD and TAIL insns after
   inheritance/split transformation.  The function removes dead moves
   too.  Only registers in check_only_regs (those touched by
   inheritance/split) are maintained.  The walk is backward from TAIL
   to HEAD.  */
static void
update_ebb_live_info (rtx_insn *head, rtx_insn *tail)
{
  unsigned int j;
  int i, regno;
  bool live_p;
  rtx_insn *prev_insn;
  rtx set;
  bool remove_p;
  basic_block last_bb, prev_bb, curr_bb;
  bitmap_iterator bi;
  struct lra_insn_reg *reg;
  edge e;
  edge_iterator ei;

  last_bb = BLOCK_FOR_INSN (tail);
  prev_bb = NULL;
  for (curr_insn = tail;
       curr_insn != PREV_INSN (head);
       curr_insn = prev_insn)
    {
      prev_insn = PREV_INSN (curr_insn);
      /* We need to process empty blocks too.  They contain
	 NOTE_INSN_BASIC_BLOCK referring for the basic block.  */
      if (NOTE_P (curr_insn) && NOTE_KIND (curr_insn) != NOTE_INSN_BASIC_BLOCK)
	continue;
      curr_bb = BLOCK_FOR_INSN (curr_insn);
      if (curr_bb != prev_bb)
	{
	  /* Crossed a block boundary (backwards): commit the live set
	     into the DF live-in of the block just finished and the
	     live-out of the block just entered.  */
	  if (prev_bb != NULL)
	    {
	      /* Update df_get_live_in (prev_bb):  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		if (bitmap_bit_p (&live_regs, j))
		  bitmap_set_bit (df_get_live_in (prev_bb), j);
		else
		  bitmap_clear_bit (df_get_live_in (prev_bb), j);
	    }
	  if (curr_bb != last_bb)
	    {
	      /* Update df_get_live_out (curr_bb): a reg is live out if
	         it is live in the fall-through successor (live_regs) or
	         in any other successor's live-in.  */
	      EXECUTE_IF_SET_IN_BITMAP (&check_only_regs, 0, j, bi)
		{
		  live_p = bitmap_bit_p (&live_regs, j);
		  if (! live_p)
		    FOR_EACH_EDGE (e, ei, curr_bb->succs)
		      if (bitmap_bit_p (df_get_live_in (e->dest), j))
			{
			  live_p = true;
			  break;
			}
		  if (live_p)
		    bitmap_set_bit (df_get_live_out (curr_bb), j);
		  else
		    bitmap_clear_bit (df_get_live_out (curr_bb), j);
		}
	    }
	  prev_bb = curr_bb;
	  bitmap_and (&live_regs, &check_only_regs, df_get_live_out (curr_bb));
	}
      if (! NONDEBUG_INSN_P (curr_insn))
	continue;
      curr_id = lra_get_insn_recog_data (curr_insn);
      curr_static_id = curr_id->insn_static_data;
      remove_p = false;
      /* A single set of a tracked pseudo that is dead at this point is
	 a removable dead store.  */
      if ((set = single_set (curr_insn)) != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && (regno = REGNO (SET_DEST (set))) >= FIRST_PSEUDO_REGISTER
	  && SET_DEST (set) != pic_offset_table_rtx
	  && bitmap_bit_p (&check_only_regs, regno)
	  && ! bitmap_bit_p (&live_regs, regno))
	remove_p = true;
      /* See which defined values die here.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type == OP_OUT && ! reg->subreg_p)
	  bitmap_clear_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make clobbered argument hard registers die.  Clobbered arg
	   hard regs are encoded offset by FIRST_PSEUDO_REGISTER.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno >= FIRST_PSEUDO_REGISTER)
	    bitmap_clear_bit (&live_regs, regno - FIRST_PSEUDO_REGISTER);
      /* Mark each used value as live.  */
      for (reg = curr_id->regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
	if (reg->type != OP_OUT
	    && bitmap_bit_p (&check_only_regs, reg->regno))
	  bitmap_set_bit (&live_regs, reg->regno);
      if (curr_id->arg_hard_regs != NULL)
	/* Make used argument hard registers live.  */
	for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
	  if (regno < FIRST_PSEUDO_REGISTER
	      && bitmap_bit_p (&check_only_regs, regno))
	    bitmap_set_bit (&live_regs, regno);
      /* It is quite important to remove dead move insns because it
	 means removing dead store.  We don't need to process them for
	 constraints.  */
      if (remove_p)
	{
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file, "	    Removing dead insn:\n ");
	      dump_insn_slim (lra_dump_file, curr_insn);
	    }
	  lra_set_insn_deleted (curr_insn);
	}
    }
}
6128
/* The structure describes info to do an inheritance for the current
   insn.  We need to collect such info first before doing the
   transformations because the transformations change the insn
   internal representation.  */
struct to_inherit
{
  /* Original regno.  */
  int regno;
  /* Subsequent insns which can inherit original reg value.  Same
     representation as the "insns" field of usage_insns.  */
  rtx insns;
};

/* Array containing all info for doing inheritance from the current
   insn.  */
static struct to_inherit to_inherit[LRA_MAX_INSN_RELOADS];

/* Number of elements in the previous array.  */
static int to_inherit_num;
6147
6148/* Add inheritance info REGNO and INSNS. Their meaning is described in
6149 structure to_inherit. */
6150static void
6151add_to_inherit (int regno, rtx insns)
6152{
6153 int i;
6154
6155 for (i = 0; i < to_inherit_num; i++)
6156 if (to_inherit[i].regno == regno)
6157 return;
6158 lra_assert (to_inherit_num < LRA_MAX_INSN_RELOADS);
6159 to_inherit[to_inherit_num].regno = regno;
6160 to_inherit[to_inherit_num++].insns = insns;
6161}
6162
6163/* Return the last non-debug insn in basic block BB, or the block begin
6164 note if none. */
cfa434f6 6165static rtx_insn *
55a2c322
VM
6166get_last_insertion_point (basic_block bb)
6167{
cfa434f6 6168 rtx_insn *insn;
55a2c322
VM
6169
6170 FOR_BB_INSNS_REVERSE (bb, insn)
6171 if (NONDEBUG_INSN_P (insn) || NOTE_INSN_BASIC_BLOCK_P (insn))
6172 return insn;
6173 gcc_unreachable ();
6174}
6175
6176/* Set up RES by registers living on edges FROM except the edge (FROM,
6177 TO) or by registers set up in a jump insn in BB FROM. */
6178static void
6179get_live_on_other_edges (basic_block from, basic_block to, bitmap res)
6180{
cfa434f6 6181 rtx_insn *last;
55a2c322
VM
6182 struct lra_insn_reg *reg;
6183 edge e;
6184 edge_iterator ei;
6185
6186 lra_assert (to != NULL);
6187 bitmap_clear (res);
6188 FOR_EACH_EDGE (e, ei, from->succs)
6189 if (e->dest != to)
6190 bitmap_ior_into (res, df_get_live_in (e->dest));
6191 last = get_last_insertion_point (from);
6192 if (! JUMP_P (last))
6193 return;
6194 curr_id = lra_get_insn_recog_data (last);
6195 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6196 if (reg->type != OP_IN)
6197 bitmap_set_bit (res, reg->regno);
6198}
f4eafc30 6199
55a2c322
VM
6200/* Used as a temporary results of some bitmap calculations. */
6201static bitmap_head temp_bitmap;
6202
8e9d68a9
VM
6203/* We split for reloads of small class of hard regs. The following
6204 defines how many hard regs the class should have to be qualified as
6205 small. The code is mostly oriented to x86/x86-64 architecture
6206 where some insns need to use only specific register or pair of
6207 registers and these register can live in RTL explicitly, e.g. for
6208 parameter passing. */
6209static const int max_small_class_regs_num = 2;
6210
55a2c322
VM
6211/* Do inheritance/split transformations in EBB starting with HEAD and
6212 finishing on TAIL. We process EBB insns in the reverse order.
6213 Return true if we did any inheritance/split transformation in the
6214 EBB.
6215
6216 We should avoid excessive splitting which results in worse code
6217 because of inaccurate cost calculations for spilling new split
6218 pseudos in such case. To achieve this we do splitting only if
6219 register pressure is high in given basic block and there are reload
6220 pseudos requiring hard registers. We could do more register
6221 pressure calculations at any given program point to avoid necessary
6222 splitting even more but it is to expensive and the current approach
6223 works well enough. */
6224static bool
cfa434f6 6225inherit_in_ebb (rtx_insn *head, rtx_insn *tail)
55a2c322
VM
6226{
6227 int i, src_regno, dst_regno, nregs;
df2980be 6228 bool change_p, succ_p, update_reloads_num_p;
cfa434f6 6229 rtx_insn *prev_insn, *last_insn;
8a8330b7 6230 rtx next_usage_insns, curr_set;
55a2c322
VM
6231 enum reg_class cl;
6232 struct lra_insn_reg *reg;
6233 basic_block last_processed_bb, curr_bb = NULL;
6234 HARD_REG_SET potential_reload_hard_regs, live_hard_regs;
6235 bitmap to_process;
6236 unsigned int j;
6237 bitmap_iterator bi;
6238 bool head_p, after_p;
6239
6240 change_p = false;
6241 curr_usage_insns_check++;
8a8330b7 6242 clear_invariants ();
55a2c322
VM
6243 reloads_num = calls_num = 0;
6244 bitmap_clear (&check_only_regs);
8a8330b7 6245 bitmap_clear (&invalid_invariant_regs);
55a2c322
VM
6246 last_processed_bb = NULL;
6247 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
6576d245 6248 live_hard_regs = eliminable_regset;
8e9d68a9 6249 IOR_HARD_REG_SET (live_hard_regs, lra_no_alloc_regs);
55a2c322
VM
6250 /* We don't process new insns generated in the loop. */
6251 for (curr_insn = tail; curr_insn != PREV_INSN (head); curr_insn = prev_insn)
6252 {
6253 prev_insn = PREV_INSN (curr_insn);
6254 if (BLOCK_FOR_INSN (curr_insn) != NULL)
6255 curr_bb = BLOCK_FOR_INSN (curr_insn);
6256 if (last_processed_bb != curr_bb)
6257 {
6258 /* We are at the end of BB. Add qualified living
6259 pseudos for potential splitting. */
6260 to_process = df_get_live_out (curr_bb);
6261 if (last_processed_bb != NULL)
f4eafc30 6262 {
55a2c322
VM
6263 /* We are somewhere in the middle of EBB. */
6264 get_live_on_other_edges (curr_bb, last_processed_bb,
6265 &temp_bitmap);
6266 to_process = &temp_bitmap;
6267 }
6268 last_processed_bb = curr_bb;
6269 last_insn = get_last_insertion_point (curr_bb);
6270 after_p = (! JUMP_P (last_insn)
6271 && (! CALL_P (last_insn)
6272 || (find_reg_note (last_insn,
6273 REG_NORETURN, NULL_RTX) == NULL_RTX
6274 && ! SIBLING_CALL_P (last_insn))));
55a2c322
VM
6275 CLEAR_HARD_REG_SET (potential_reload_hard_regs);
6276 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6277 {
6278 if ((int) j >= lra_constraint_new_regno_start)
6279 break;
6280 if (j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6281 {
6282 if (j < FIRST_PSEUDO_REGISTER)
6283 SET_HARD_REG_BIT (live_hard_regs, j);
6284 else
6285 add_to_hard_reg_set (&live_hard_regs,
6286 PSEUDO_REGNO_MODE (j),
6287 reg_renumber[j]);
6288 setup_next_usage_insn (j, last_insn, reloads_num, after_p);
6289 }
6290 }
6291 }
6292 src_regno = dst_regno = -1;
8a8330b7
VM
6293 curr_set = single_set (curr_insn);
6294 if (curr_set != NULL_RTX && REG_P (SET_DEST (curr_set)))
6295 dst_regno = REGNO (SET_DEST (curr_set));
6296 if (curr_set != NULL_RTX && REG_P (SET_SRC (curr_set)))
6297 src_regno = REGNO (SET_SRC (curr_set));
df2980be 6298 update_reloads_num_p = true;
55a2c322
VM
6299 if (src_regno < lra_constraint_new_regno_start
6300 && src_regno >= FIRST_PSEUDO_REGISTER
6301 && reg_renumber[src_regno] < 0
6302 && dst_regno >= lra_constraint_new_regno_start
6303 && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS)
6304 {
6305 /* 'reload_pseudo <- original_pseudo'. */
8e9d68a9
VM
6306 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6307 reloads_num++;
df2980be 6308 update_reloads_num_p = false;
55a2c322
VM
6309 succ_p = false;
6310 if (usage_insns[src_regno].check == curr_usage_insns_check
6311 && (next_usage_insns = usage_insns[src_regno].insns) != NULL_RTX)
6312 succ_p = inherit_reload_reg (false, src_regno, cl,
6313 curr_insn, next_usage_insns);
6314 if (succ_p)
6315 change_p = true;
6316 else
6317 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6318 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6319 IOR_HARD_REG_SET (potential_reload_hard_regs,
6320 reg_class_contents[cl]);
6321 }
8a8330b7
VM
6322 else if (src_regno < 0
6323 && dst_regno >= lra_constraint_new_regno_start
6324 && invariant_p (SET_SRC (curr_set))
6325 && (cl = lra_get_allocno_class (dst_regno)) != NO_REGS
f7abdf36
VM
6326 && ! bitmap_bit_p (&invalid_invariant_regs, dst_regno)
6327 && ! bitmap_bit_p (&invalid_invariant_regs,
6328 ORIGINAL_REGNO(regno_reg_rtx[dst_regno])))
8a8330b7
VM
6329 {
6330 /* 'reload_pseudo <- invariant'. */
6331 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6332 reloads_num++;
6333 update_reloads_num_p = false;
6334 if (process_invariant_for_inheritance (SET_DEST (curr_set), SET_SRC (curr_set)))
6335 change_p = true;
6336 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6337 IOR_HARD_REG_SET (potential_reload_hard_regs,
6338 reg_class_contents[cl]);
6339 }
55a2c322
VM
6340 else if (src_regno >= lra_constraint_new_regno_start
6341 && dst_regno < lra_constraint_new_regno_start
6342 && dst_regno >= FIRST_PSEUDO_REGISTER
6343 && reg_renumber[dst_regno] < 0
6344 && (cl = lra_get_allocno_class (src_regno)) != NO_REGS
6345 && usage_insns[dst_regno].check == curr_usage_insns_check
6346 && (next_usage_insns
6347 = usage_insns[dst_regno].insns) != NULL_RTX)
6348 {
8e9d68a9
VM
6349 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6350 reloads_num++;
df2980be 6351 update_reloads_num_p = false;
55a2c322
VM
6352 /* 'original_pseudo <- reload_pseudo'. */
6353 if (! JUMP_P (curr_insn)
6354 && inherit_reload_reg (true, dst_regno, cl,
6355 curr_insn, next_usage_insns))
6356 change_p = true;
6357 /* Invalidate. */
6358 usage_insns[dst_regno].check = 0;
6359 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6360 IOR_HARD_REG_SET (potential_reload_hard_regs,
6361 reg_class_contents[cl]);
6362 }
6363 else if (INSN_P (curr_insn))
6364 {
2f259720 6365 int iter;
55a2c322
VM
6366 int max_uid = get_max_uid ();
6367
6368 curr_id = lra_get_insn_recog_data (curr_insn);
2f259720 6369 curr_static_id = curr_id->insn_static_data;
55a2c322
VM
6370 to_inherit_num = 0;
6371 /* Process insn definitions. */
2f259720
VM
6372 for (iter = 0; iter < 2; iter++)
6373 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6374 reg != NULL;
6375 reg = reg->next)
6376 if (reg->type != OP_IN
6377 && (dst_regno = reg->regno) < lra_constraint_new_regno_start)
6378 {
6379 if (dst_regno >= FIRST_PSEUDO_REGISTER && reg->type == OP_OUT
6380 && reg_renumber[dst_regno] < 0 && ! reg->subreg_p
6381 && usage_insns[dst_regno].check == curr_usage_insns_check
6382 && (next_usage_insns
6383 = usage_insns[dst_regno].insns) != NULL_RTX)
6384 {
6385 struct lra_insn_reg *r;
6386
6387 for (r = curr_id->regs; r != NULL; r = r->next)
6388 if (r->type != OP_OUT && r->regno == dst_regno)
6389 break;
6390 /* Don't do inheritance if the pseudo is also
6391 used in the insn. */
6392 if (r == NULL)
67914693 6393 /* We cannot do inheritance right now
2f259720
VM
6394 because the current insn reg info (chain
6395 regs) can change after that. */
6396 add_to_inherit (dst_regno, next_usage_insns);
6397 }
67914693 6398 /* We cannot process one reg twice here because of
2f259720
VM
6399 usage_insns invalidation. */
6400 if ((dst_regno < FIRST_PSEUDO_REGISTER
6401 || reg_renumber[dst_regno] >= 0)
e32e4c4a 6402 && ! reg->subreg_p && reg->type != OP_IN)
2f259720
VM
6403 {
6404 HARD_REG_SET s;
6405
6406 if (split_if_necessary (dst_regno, reg->biggest_mode,
6407 potential_reload_hard_regs,
6408 false, curr_insn, max_uid))
6409 change_p = true;
6410 CLEAR_HARD_REG_SET (s);
6411 if (dst_regno < FIRST_PSEUDO_REGISTER)
6412 add_to_hard_reg_set (&s, reg->biggest_mode, dst_regno);
6413 else
6414 add_to_hard_reg_set (&s, PSEUDO_REGNO_MODE (dst_regno),
6415 reg_renumber[dst_regno]);
6416 AND_COMPL_HARD_REG_SET (live_hard_regs, s);
1e05d185 6417 AND_COMPL_HARD_REG_SET (potential_reload_hard_regs, s);
2f259720
VM
6418 }
6419 /* We should invalidate potential inheritance or
6420 splitting for the current insn usages to the next
6421 usage insns (see code below) as the output pseudo
6422 prevents this. */
6423 if ((dst_regno >= FIRST_PSEUDO_REGISTER
6424 && reg_renumber[dst_regno] < 0)
6425 || (reg->type == OP_OUT && ! reg->subreg_p
6426 && (dst_regno < FIRST_PSEUDO_REGISTER
6427 || reg_renumber[dst_regno] >= 0)))
6428 {
6429 /* Invalidate and mark definitions. */
6430 if (dst_regno >= FIRST_PSEUDO_REGISTER)
6431 usage_insns[dst_regno].check = -(int) INSN_UID (curr_insn);
6432 else
6433 {
ad474626
RS
6434 nregs = hard_regno_nregs (dst_regno,
6435 reg->biggest_mode);
2f259720
VM
6436 for (i = 0; i < nregs; i++)
6437 usage_insns[dst_regno + i].check
6438 = -(int) INSN_UID (curr_insn);
6439 }
6440 }
6441 }
9d86e84e
VM
6442 /* Process clobbered call regs. */
6443 if (curr_id->arg_hard_regs != NULL)
6444 for (i = 0; (dst_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6445 if (dst_regno >= FIRST_PSEUDO_REGISTER)
6446 usage_insns[dst_regno - FIRST_PSEUDO_REGISTER].check
6447 = -(int) INSN_UID (curr_insn);
55a2c322
VM
6448 if (! JUMP_P (curr_insn))
6449 for (i = 0; i < to_inherit_num; i++)
6450 if (inherit_reload_reg (true, to_inherit[i].regno,
6451 ALL_REGS, curr_insn,
6452 to_inherit[i].insns))
6453 change_p = true;
6454 if (CALL_P (curr_insn))
6455 {
cfa434f6
DM
6456 rtx cheap, pat, dest;
6457 rtx_insn *restore;
55a2c322
VM
6458 int regno, hard_regno;
6459
6460 calls_num++;
6461 if ((cheap = find_reg_note (curr_insn,
6462 REG_RETURNED, NULL_RTX)) != NULL_RTX
6463 && ((cheap = XEXP (cheap, 0)), true)
6464 && (regno = REGNO (cheap)) >= FIRST_PSEUDO_REGISTER
6465 && (hard_regno = reg_renumber[regno]) >= 0
851dac7c 6466 && usage_insns[regno].check == curr_usage_insns_check
55a2c322
VM
6467 /* If there are pending saves/restores, the
6468 optimization is not worth. */
6469 && usage_insns[regno].calls_num == calls_num - 1
6470 && TEST_HARD_REG_BIT (call_used_reg_set, hard_regno))
6471 {
6472 /* Restore the pseudo from the call result as
6473 REG_RETURNED note says that the pseudo value is
6474 in the call result and the pseudo is an argument
6475 of the call. */
6476 pat = PATTERN (curr_insn);
6477 if (GET_CODE (pat) == PARALLEL)
6478 pat = XVECEXP (pat, 0, 0);
6479 dest = SET_DEST (pat);
54bddf1d
IE
6480 /* For multiple return values dest is PARALLEL.
6481 Currently we handle only single return value case. */
6482 if (REG_P (dest))
6483 {
6484 start_sequence ();
6485 emit_move_insn (cheap, copy_rtx (dest));
6486 restore = get_insns ();
6487 end_sequence ();
6488 lra_process_new_insns (curr_insn, NULL, restore,
6489 "Inserting call parameter restore");
6490 /* We don't need to save/restore of the pseudo from
6491 this call. */
6492 usage_insns[regno].calls_num = calls_num;
6493 bitmap_set_bit (&check_only_regs, regno);
6494 }
55a2c322
VM
6495 }
6496 }
6497 to_inherit_num = 0;
6498 /* Process insn usages. */
2f259720
VM
6499 for (iter = 0; iter < 2; iter++)
6500 for (reg = iter == 0 ? curr_id->regs : curr_static_id->hard_regs;
6501 reg != NULL;
6502 reg = reg->next)
6503 if ((reg->type != OP_OUT
6504 || (reg->type == OP_OUT && reg->subreg_p))
6505 && (src_regno = reg->regno) < lra_constraint_new_regno_start)
6506 {
6507 if (src_regno >= FIRST_PSEUDO_REGISTER
6508 && reg_renumber[src_regno] < 0 && reg->type == OP_IN)
6509 {
6510 if (usage_insns[src_regno].check == curr_usage_insns_check
6511 && (next_usage_insns
6512 = usage_insns[src_regno].insns) != NULL_RTX
6513 && NONDEBUG_INSN_P (curr_insn))
6514 add_to_inherit (src_regno, next_usage_insns);
6515 else if (usage_insns[src_regno].check
6516 != -(int) INSN_UID (curr_insn))
6517 /* Add usages but only if the reg is not set up
6518 in the same insn. */
6519 add_next_usage_insn (src_regno, curr_insn, reloads_num);
6520 }
6521 else if (src_regno < FIRST_PSEUDO_REGISTER
6522 || reg_renumber[src_regno] >= 0)
6523 {
6524 bool before_p;
e67d1102 6525 rtx_insn *use_insn = curr_insn;
2f259720
VM
6526
6527 before_p = (JUMP_P (curr_insn)
6528 || (CALL_P (curr_insn) && reg->type == OP_IN));
6529 if (NONDEBUG_INSN_P (curr_insn)
8e9d68a9 6530 && (! JUMP_P (curr_insn) || reg->type == OP_IN)
2f259720
VM
6531 && split_if_necessary (src_regno, reg->biggest_mode,
6532 potential_reload_hard_regs,
6533 before_p, curr_insn, max_uid))
6534 {
6535 if (reg->subreg_p)
6536 lra_risky_transformations_p = true;
6537 change_p = true;
8e9d68a9 6538 /* Invalidate. */
2f259720
VM
6539 usage_insns[src_regno].check = 0;
6540 if (before_p)
6541 use_insn = PREV_INSN (curr_insn);
6542 }
6543 if (NONDEBUG_INSN_P (curr_insn))
6544 {
6545 if (src_regno < FIRST_PSEUDO_REGISTER)
6546 add_to_hard_reg_set (&live_hard_regs,
6547 reg->biggest_mode, src_regno);
6548 else
6549 add_to_hard_reg_set (&live_hard_regs,
6550 PSEUDO_REGNO_MODE (src_regno),
6551 reg_renumber[src_regno]);
6552 }
16ba97b9
VM
6553 if (src_regno >= FIRST_PSEUDO_REGISTER)
6554 add_next_usage_insn (src_regno, use_insn, reloads_num);
6555 else
6556 {
6557 for (i = 0; i < hard_regno_nregs (src_regno, reg->biggest_mode); i++)
6558 add_next_usage_insn (src_regno + i, use_insn, reloads_num);
6559 }
2f259720
VM
6560 }
6561 }
9d86e84e 6562 /* Process used call regs. */
df2980be
VM
6563 if (curr_id->arg_hard_regs != NULL)
6564 for (i = 0; (src_regno = curr_id->arg_hard_regs[i]) >= 0; i++)
6565 if (src_regno < FIRST_PSEUDO_REGISTER)
6566 {
6567 SET_HARD_REG_BIT (live_hard_regs, src_regno);
6568 add_next_usage_insn (src_regno, curr_insn, reloads_num);
6569 }
55a2c322
VM
6570 for (i = 0; i < to_inherit_num; i++)
6571 {
6572 src_regno = to_inherit[i].regno;
6573 if (inherit_reload_reg (false, src_regno, ALL_REGS,
6574 curr_insn, to_inherit[i].insns))
6575 change_p = true;
6576 else
6577 setup_next_usage_insn (src_regno, curr_insn, reloads_num, false);
6578 }
6579 }
df2980be 6580 if (update_reloads_num_p
8a8330b7 6581 && NONDEBUG_INSN_P (curr_insn) && curr_set != NULL_RTX)
df2980be
VM
6582 {
6583 int regno = -1;
8a8330b7
VM
6584 if ((REG_P (SET_DEST (curr_set))
6585 && (regno = REGNO (SET_DEST (curr_set))) >= lra_constraint_new_regno_start
df2980be
VM
6586 && reg_renumber[regno] < 0
6587 && (cl = lra_get_allocno_class (regno)) != NO_REGS)
8a8330b7
VM
6588 || (REG_P (SET_SRC (curr_set))
6589 && (regno = REGNO (SET_SRC (curr_set))) >= lra_constraint_new_regno_start
df2980be
VM
6590 && reg_renumber[regno] < 0
6591 && (cl = lra_get_allocno_class (regno)) != NO_REGS))
6592 {
8e9d68a9
VM
6593 if (ira_class_hard_regs_num[cl] <= max_small_class_regs_num)
6594 reloads_num++;
df2980be
VM
6595 if (hard_reg_set_subset_p (reg_class_contents[cl], live_hard_regs))
6596 IOR_HARD_REG_SET (potential_reload_hard_regs,
6597 reg_class_contents[cl]);
6598 }
6599 }
8a8330b7
VM
6600 if (NONDEBUG_INSN_P (curr_insn))
6601 {
6602 int regno;
6603
6604 /* Invalidate invariants with changed regs. */
6605 curr_id = lra_get_insn_recog_data (curr_insn);
6606 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6607 if (reg->type != OP_IN)
f7abdf36
VM
6608 {
6609 bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6610 bitmap_set_bit (&invalid_invariant_regs,
6611 ORIGINAL_REGNO (regno_reg_rtx[reg->regno]));
6612 }
8a8330b7
VM
6613 curr_static_id = curr_id->insn_static_data;
6614 for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
6615 if (reg->type != OP_IN)
6616 bitmap_set_bit (&invalid_invariant_regs, reg->regno);
6617 if (curr_id->arg_hard_regs != NULL)
6618 for (i = 0; (regno = curr_id->arg_hard_regs[i]) >= 0; i++)
f7abdf36 6619 if (regno >= FIRST_PSEUDO_REGISTER)
8a8330b7 6620 bitmap_set_bit (&invalid_invariant_regs,
f7abdf36 6621 regno - FIRST_PSEUDO_REGISTER);
8a8330b7 6622 }
55a2c322
VM
6623 /* We reached the start of the current basic block. */
6624 if (prev_insn == NULL_RTX || prev_insn == PREV_INSN (head)
6625 || BLOCK_FOR_INSN (prev_insn) != curr_bb)
6626 {
6627 /* We reached the beginning of the current block -- do
6628 rest of spliting in the current BB. */
6629 to_process = df_get_live_in (curr_bb);
6630 if (BLOCK_FOR_INSN (head) != curr_bb)
f4eafc30 6631 {
55a2c322
VM
6632 /* We are somewhere in the middle of EBB. */
6633 get_live_on_other_edges (EDGE_PRED (curr_bb, 0)->src,
6634 curr_bb, &temp_bitmap);
6635 to_process = &temp_bitmap;
6636 }
6637 head_p = true;
6638 EXECUTE_IF_SET_IN_BITMAP (to_process, 0, j, bi)
6639 {
6640 if ((int) j >= lra_constraint_new_regno_start)
6641 break;
6642 if (((int) j < FIRST_PSEUDO_REGISTER || reg_renumber[j] >= 0)
6643 && usage_insns[j].check == curr_usage_insns_check
6644 && (next_usage_insns = usage_insns[j].insns) != NULL_RTX)
6645 {
6646 if (need_for_split_p (potential_reload_hard_regs, j))
6647 {
6648 if (lra_dump_file != NULL && head_p)
6649 {
6650 fprintf (lra_dump_file,
6651 " ----------------------------------\n");
6652 head_p = false;
6653 }
6654 if (split_reg (false, j, bb_note (curr_bb),
6027ea4c 6655 next_usage_insns, NULL))
55a2c322
VM
6656 change_p = true;
6657 }
6658 usage_insns[j].check = 0;
6659 }
6660 }
6661 }
6662 }
6663 return change_p;
6664}
6665
/* This value affects EBB forming.  If probability of edge from EBB to
   a BB is not greater than the following value, we don't add the BB
   to EBB.  */
#define EBB_PROBABILITY_CUTOFF \
  ((REG_BR_PROB_BASE * LRA_INHERITANCE_EBB_PROBABILITY_CUTOFF) / 100)

/* Current number of inheritance/split iteration.  */
int lra_inheritance_iter;
6674
6675/* Entry function for inheritance/split pass. */
6676void
6677lra_inheritance (void)
6678{
6679 int i;
6680 basic_block bb, start_bb;
6681 edge e;
6682
55a2c322 6683 lra_inheritance_iter++;
8e3a4869 6684 if (lra_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
e731262b
VM
6685 return;
6686 timevar_push (TV_LRA_INHERITANCE);
55a2c322
VM
6687 if (lra_dump_file != NULL)
6688 fprintf (lra_dump_file, "\n********** Inheritance #%d: **********\n\n",
6689 lra_inheritance_iter);
6690 curr_usage_insns_check = 0;
6691 usage_insns = XNEWVEC (struct usage_insns, lra_constraint_new_regno_start);
6692 for (i = 0; i < lra_constraint_new_regno_start; i++)
6693 usage_insns[i].check = 0;
6694 bitmap_initialize (&check_only_regs, &reg_obstack);
8a8330b7 6695 bitmap_initialize (&invalid_invariant_regs, &reg_obstack);
55a2c322
VM
6696 bitmap_initialize (&live_regs, &reg_obstack);
6697 bitmap_initialize (&temp_bitmap, &reg_obstack);
6698 bitmap_initialize (&ebb_global_regs, &reg_obstack);
11cd3bed 6699 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
6700 {
6701 start_bb = bb;
6702 if (lra_dump_file != NULL)
6703 fprintf (lra_dump_file, "EBB");
6704 /* Form a EBB starting with BB. */
6705 bitmap_clear (&ebb_global_regs);
6706 bitmap_ior_into (&ebb_global_regs, df_get_live_in (bb));
6707 for (;;)
6708 {
6709 if (lra_dump_file != NULL)
6710 fprintf (lra_dump_file, " %d", bb->index);
fefa31b5
DM
6711 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
6712 || LABEL_P (BB_HEAD (bb->next_bb)))
55a2c322
VM
6713 break;
6714 e = find_fallthru_edge (bb->succs);
6715 if (! e)
6716 break;
357067f2
JH
6717 if (e->probability.initialized_p ()
6718 && e->probability.to_reg_br_prob_base () < EBB_PROBABILITY_CUTOFF)
55a2c322
VM
6719 break;
6720 bb = bb->next_bb;
6721 }
6722 bitmap_ior_into (&ebb_global_regs, df_get_live_out (bb));
6723 if (lra_dump_file != NULL)
6724 fprintf (lra_dump_file, "\n");
6725 if (inherit_in_ebb (BB_HEAD (start_bb), BB_END (bb)))
6726 /* Remember that the EBB head and tail can change in
6727 inherit_in_ebb. */
6728 update_ebb_live_info (BB_HEAD (start_bb), BB_END (bb));
6729 }
1c252ef3
RB
6730 bitmap_release (&ebb_global_regs);
6731 bitmap_release (&temp_bitmap);
6732 bitmap_release (&live_regs);
6733 bitmap_release (&invalid_invariant_regs);
6734 bitmap_release (&check_only_regs);
55a2c322
VM
6735 free (usage_insns);
6736
6737 timevar_pop (TV_LRA_INHERITANCE);
6738}
6739
/* This page contains code to undo failed inheritance/split
   transformations.  */

/* Current number of iteration undoing inheritance/split.  */
int lra_undo_inheritance_iter;
6747
6748/* Fix BB live info LIVE after removing pseudos created on pass doing
6749 inheritance/split which are REMOVED_PSEUDOS. */
6750static void
6751fix_bb_live_info (bitmap live, bitmap removed_pseudos)
6752{
6753 unsigned int regno;
6754 bitmap_iterator bi;
6755
6756 EXECUTE_IF_SET_IN_BITMAP (removed_pseudos, 0, regno, bi)
8a8330b7
VM
6757 if (bitmap_clear_bit (live, regno)
6758 && REG_P (lra_reg_info[regno].restore_rtx))
6759 bitmap_set_bit (live, REGNO (lra_reg_info[regno].restore_rtx));
55a2c322
VM
6760}
6761
6762/* Return regno of the (subreg of) REG. Otherwise, return a negative
6763 number. */
6764static int
6765get_regno (rtx reg)
6766{
6767 if (GET_CODE (reg) == SUBREG)
6768 reg = SUBREG_REG (reg);
6769 if (REG_P (reg))
6770 return REGNO (reg);
6771 return -1;
6772}
6773
cefe08a4
VM
6774/* Delete a move INSN with destination reg DREGNO and a previous
6775 clobber insn with the same regno. The inheritance/split code can
6776 generate moves with preceding clobber and when we delete such moves
6777 we should delete the clobber insn too to keep the correct life
6778 info. */
6779static void
6780delete_move_and_clobber (rtx_insn *insn, int dregno)
6781{
6782 rtx_insn *prev_insn = PREV_INSN (insn);
6783
6784 lra_set_insn_deleted (insn);
79b57d18 6785 lra_assert (dregno >= 0);
cefe08a4
VM
6786 if (prev_insn != NULL && NONDEBUG_INSN_P (prev_insn)
6787 && GET_CODE (PATTERN (prev_insn)) == CLOBBER
6788 && dregno == get_regno (XEXP (PATTERN (prev_insn), 0)))
6789 lra_set_insn_deleted (prev_insn);
6790}
6791
55a2c322
VM
6792/* Remove inheritance/split pseudos which are in REMOVE_PSEUDOS and
6793 return true if we did any change. The undo transformations for
6794 inheritance looks like
6795 i <- i2
6796 p <- i => p <- i2
6797 or removing
6798 p <- i, i <- p, and i <- i3
6799 where p is original pseudo from which inheritance pseudo i was
6800 created, i and i3 are removed inheritance pseudos, i2 is another
6801 not removed inheritance pseudo. All split pseudos or other
6802 occurrences of removed inheritance pseudos are changed on the
6803 corresponding original pseudos.
6804
6805 The function also schedules insns changed and created during
6806 inheritance/split pass for processing by the subsequent constraint
6807 pass. */
6808static bool
6809remove_inheritance_pseudos (bitmap remove_pseudos)
6810{
6811 basic_block bb;
8a8330b7
VM
6812 int regno, sregno, prev_sregno, dregno;
6813 rtx restore_rtx;
cfa434f6
DM
6814 rtx set, prev_set;
6815 rtx_insn *prev_insn;
55a2c322
VM
6816 bool change_p, done_p;
6817
6818 change_p = ! bitmap_empty_p (remove_pseudos);
67914693 6819 /* We cannot finish the function right away if CHANGE_P is true
55a2c322
VM
6820 because we need to marks insns affected by previous
6821 inheritance/split pass for processing by the subsequent
6822 constraint pass. */
11cd3bed 6823 FOR_EACH_BB_FN (bb, cfun)
55a2c322
VM
6824 {
6825 fix_bb_live_info (df_get_live_in (bb), remove_pseudos);
6826 fix_bb_live_info (df_get_live_out (bb), remove_pseudos);
6827 FOR_BB_INSNS_REVERSE (bb, curr_insn)
6828 {
6829 if (! INSN_P (curr_insn))
6830 continue;
6831 done_p = false;
6832 sregno = dregno = -1;
6833 if (change_p && NONDEBUG_INSN_P (curr_insn)
6834 && (set = single_set (curr_insn)) != NULL_RTX)
6835 {
6836 dregno = get_regno (SET_DEST (set));
6837 sregno = get_regno (SET_SRC (set));
6838 }
f4eafc30 6839
55a2c322
VM
6840 if (sregno >= 0 && dregno >= 0)
6841 {
8a8330b7
VM
6842 if (bitmap_bit_p (remove_pseudos, dregno)
6843 && ! REG_P (lra_reg_info[dregno].restore_rtx))
6844 {
6845 /* invariant inheritance pseudo <- original pseudo */
6846 if (lra_dump_file != NULL)
6847 {
6848 fprintf (lra_dump_file, " Removing invariant inheritance:\n");
6849 dump_insn_slim (lra_dump_file, curr_insn);
6850 fprintf (lra_dump_file, "\n");
6851 }
6852 delete_move_and_clobber (curr_insn, dregno);
6853 done_p = true;
6854 }
6855 else if (bitmap_bit_p (remove_pseudos, sregno)
6856 && ! REG_P (lra_reg_info[sregno].restore_rtx))
6857 {
6858 /* reload pseudo <- invariant inheritance pseudo */
6859 start_sequence ();
67914693 6860 /* We cannot just change the source. It might be
8a8330b7 6861 an insn different from the move. */
b10d44ef 6862 emit_insn (lra_reg_info[sregno].restore_rtx);
8a8330b7
VM
6863 rtx_insn *new_insns = get_insns ();
6864 end_sequence ();
b10d44ef
VM
6865 lra_assert (single_set (new_insns) != NULL
6866 && SET_DEST (set) == SET_DEST (single_set (new_insns)));
8a8330b7
VM
6867 lra_process_new_insns (curr_insn, NULL, new_insns,
6868 "Changing reload<-invariant inheritance");
6869 delete_move_and_clobber (curr_insn, dregno);
6870 done_p = true;
6871 }
6872 else if ((bitmap_bit_p (remove_pseudos, sregno)
6873 && (get_regno (lra_reg_info[sregno].restore_rtx) == dregno
6874 || (bitmap_bit_p (remove_pseudos, dregno)
6875 && get_regno (lra_reg_info[sregno].restore_rtx) >= 0
6876 && (get_regno (lra_reg_info[sregno].restore_rtx)
6877 == get_regno (lra_reg_info[dregno].restore_rtx)))))
55a2c322 6878 || (bitmap_bit_p (remove_pseudos, dregno)
8a8330b7 6879 && get_regno (lra_reg_info[dregno].restore_rtx) == sregno))
55a2c322
VM
6880 /* One of the following cases:
6881 original <- removed inheritance pseudo
6882 removed inherit pseudo <- another removed inherit pseudo
6883 removed inherit pseudo <- original pseudo
6884 Or
6885 removed_split_pseudo <- original_reg
6886 original_reg <- removed_split_pseudo */
6887 {
6888 if (lra_dump_file != NULL)
6889 {
6890 fprintf (lra_dump_file, " Removing %s:\n",
6891 bitmap_bit_p (&lra_split_regs, sregno)
6892 || bitmap_bit_p (&lra_split_regs, dregno)
6893 ? "split" : "inheritance");
cfbeaedf 6894 dump_insn_slim (lra_dump_file, curr_insn);
55a2c322 6895 }
cefe08a4 6896 delete_move_and_clobber (curr_insn, dregno);
55a2c322
VM
6897 done_p = true;
6898 }
6899 else if (bitmap_bit_p (remove_pseudos, sregno)
6900 && bitmap_bit_p (&lra_inheritance_pseudos, sregno))
6901 {
6902 /* Search the following pattern:
6903 inherit_or_split_pseudo1 <- inherit_or_split_pseudo2
6904 original_pseudo <- inherit_or_split_pseudo1
6905 where the 2nd insn is the current insn and
6906 inherit_or_split_pseudo2 is not removed. If it is found,
6907 change the current insn onto:
6908 original_pseudo <- inherit_or_split_pseudo2. */
6909 for (prev_insn = PREV_INSN (curr_insn);
6910 prev_insn != NULL_RTX && ! NONDEBUG_INSN_P (prev_insn);
6911 prev_insn = PREV_INSN (prev_insn))
6912 ;
6913 if (prev_insn != NULL_RTX && BLOCK_FOR_INSN (prev_insn) == bb
6914 && (prev_set = single_set (prev_insn)) != NULL_RTX
6915 /* There should be no subregs in insn we are
6916 searching because only the original reg might
6917 be in subreg when we changed the mode of
6918 load/store for splitting. */
6919 && REG_P (SET_DEST (prev_set))
6920 && REG_P (SET_SRC (prev_set))
6921 && (int) REGNO (SET_DEST (prev_set)) == sregno
6922 && ((prev_sregno = REGNO (SET_SRC (prev_set)))
6923 >= FIRST_PSEUDO_REGISTER)
8a8330b7
VM
6924 && (lra_reg_info[prev_sregno].restore_rtx == NULL_RTX
6925 ||
6926 /* As we consider chain of inheritance or
6927 splitting described in above comment we should
6928 check that sregno and prev_sregno were
6929 inheritance/split pseudos created from the
6930 same original regno. */
6931 (get_regno (lra_reg_info[sregno].restore_rtx) >= 0
6932 && (get_regno (lra_reg_info[sregno].restore_rtx)
6933 == get_regno (lra_reg_info[prev_sregno].restore_rtx))))
55a2c322
VM
6934 && ! bitmap_bit_p (remove_pseudos, prev_sregno))
6935 {
6936 lra_assert (GET_MODE (SET_SRC (prev_set))
6937 == GET_MODE (regno_reg_rtx[sregno]));
ef61d1ab
AK
6938 /* Although we have a single set, the insn can
6939 contain more one sregno register occurrence
6940 as a source. Change all occurrences. */
6941 lra_substitute_pseudo_within_insn (curr_insn, sregno,
6942 SET_SRC (prev_set),
6943 false);
12b308fa
VM
6944 /* As we are finishing with processing the insn
6945 here, check the destination too as it might
6946 inheritance pseudo for another pseudo. */
6947 if (bitmap_bit_p (remove_pseudos, dregno)
6948 && bitmap_bit_p (&lra_inheritance_pseudos, dregno)
8a8330b7
VM
6949 && (restore_rtx
6950 = lra_reg_info[dregno].restore_rtx) != NULL_RTX)
12b308fa
VM
6951 {
6952 if (GET_CODE (SET_DEST (set)) == SUBREG)
8a8330b7 6953 SUBREG_REG (SET_DEST (set)) = restore_rtx;
12b308fa 6954 else
8a8330b7 6955 SET_DEST (set) = restore_rtx;
12b308fa 6956 }
55a2c322
VM
6957 lra_push_insn_and_update_insn_regno_info (curr_insn);
6958 lra_set_used_insn_alternative_by_uid
7874b7c5 6959 (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
55a2c322
VM
6960 done_p = true;
6961 if (lra_dump_file != NULL)
6962 {
6963 fprintf (lra_dump_file, " Change reload insn:\n");
cfbeaedf 6964 dump_insn_slim (lra_dump_file, curr_insn);
55a2c322
VM
6965 }
6966 }
6967 }
6968 }
6969 if (! done_p)
6970 {
6971 struct lra_insn_reg *reg;
6972 bool restored_regs_p = false;
6973 bool kept_regs_p = false;
6974
6975 curr_id = lra_get_insn_recog_data (curr_insn);
6976 for (reg = curr_id->regs; reg != NULL; reg = reg->next)
6977 {
6978 regno = reg->regno;
8a8330b7
VM
6979 restore_rtx = lra_reg_info[regno].restore_rtx;
6980 if (restore_rtx != NULL_RTX)
55a2c322
VM
6981 {
6982 if (change_p && bitmap_bit_p (remove_pseudos, regno))
6983 {
ef87312e 6984 lra_substitute_pseudo_within_insn
8a8330b7 6985 (curr_insn, regno, restore_rtx, false);
55a2c322
VM
6986 restored_regs_p = true;
6987 }
6988 else
6989 kept_regs_p = true;
6990 }
6991 }
6992 if (NONDEBUG_INSN_P (curr_insn) && kept_regs_p)
6993 {
6994 /* The instruction has changed since the previous
6995 constraints pass. */
6996 lra_push_insn_and_update_insn_regno_info (curr_insn);
6997 lra_set_used_insn_alternative_by_uid
7874b7c5 6998 (INSN_UID (curr_insn), LRA_UNKNOWN_ALT);
55a2c322
VM
6999 }
7000 else if (restored_regs_p)
7001 /* The instruction has been restored to the form that
7002 it had during the previous constraints pass. */
7003 lra_update_insn_regno_info (curr_insn);
7004 if (restored_regs_p && lra_dump_file != NULL)
7005 {
7006 fprintf (lra_dump_file, " Insn after restoring regs:\n");
cfbeaedf 7007 dump_insn_slim (lra_dump_file, curr_insn);
55a2c322
VM
7008 }
7009 }
7010 }
7011 }
7012 return change_p;
7013}
7014
2b778c9d
VM
/* If optional reload pseudos failed to get a hard register or was not
   inherited, it is better to remove optional reloads.  We do this
   transformation after undoing inheritance to figure out necessity to
   remove optional reloads easier.  Return true if we do any
   change.  */
static bool
undo_optional_reloads (void)
{
  bool change_p, keep_p;
  unsigned int regno, uid;
  bitmap_iterator bi, bi2;
  rtx_insn *insn;
  rtx set, src, dest;
  /* Start from all optional reload pseudos; bits are cleared below for
     the ones we decide to keep, leaving the set of pseudos to remove.  */
  auto_bitmap removed_optional_reload_pseudos (&reg_obstack);

  bitmap_copy (removed_optional_reload_pseudos, &lra_optional_reload_pseudos);
  /* Phase 1: decide which optional reload pseudos must be kept.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    {
      keep_p = false;
      /* Keep optional reloads from previous subpasses.  */
      if (lra_reg_info[regno].restore_rtx == NULL_RTX
	  /* If the original pseudo changed its allocation, just
	     removing the optional pseudo is dangerous as the original
	     pseudo will have longer live range.  */
	  || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] >= 0)
	keep_p = true;
      else if (reg_renumber[regno] >= 0)
	/* The optional reload pseudo got a hard register: scan its insns
	   to see whether it was actually inherited.  */
	EXECUTE_IF_SET_IN_BITMAP (&lra_reg_info[regno].insn_bitmap, 0, uid, bi2)
	  {
	    insn = lra_insn_recog_data[uid]->insn;
	    if ((set = single_set (insn)) == NULL_RTX)
	      continue;
	    src = SET_SRC (set);
	    dest = SET_DEST (set);
	    if (! REG_P (src) || ! REG_P (dest))
	      continue;
	    if (REGNO (dest) == regno
		/* Ignore insn for optional reloads itself.  */
		&& REGNO (lra_reg_info[regno].restore_rtx) != REGNO (src)
		/* Check only inheritance on last inheritance pass.  */
		&& (int) REGNO (src) >= new_regno_start
		/* Check that the optional reload was inherited.  */
		&& bitmap_bit_p (&lra_inheritance_pseudos, REGNO (src)))
	      {
		keep_p = true;
		break;
	      }
	  }
      if (keep_p)
	{
	  bitmap_clear_bit (removed_optional_reload_pseudos, regno);
	  if (lra_dump_file != NULL)
	    fprintf (lra_dump_file, "Keep optional reload reg %d\n", regno);
	}
    }
  /* Any bit still set means at least one pseudo will be removed below,
     i.e. the RTL will change.  */
  change_p = ! bitmap_empty_p (removed_optional_reload_pseudos);
  auto_bitmap insn_bitmap (&reg_obstack);
  /* Phase 2: for each pseudo being removed, delete its reload moves and
     restore the original pseudo in all remaining insns.  */
  EXECUTE_IF_SET_IN_BITMAP (removed_optional_reload_pseudos, 0, regno, bi)
    {
      if (lra_dump_file != NULL)
	fprintf (lra_dump_file, "Remove optional reload reg %d\n", regno);
      /* Work on a copy because substitution below updates the pseudo's
	 insn_bitmap while we iterate.  */
      bitmap_copy (insn_bitmap, &lra_reg_info[regno].insn_bitmap);
      EXECUTE_IF_SET_IN_BITMAP (insn_bitmap, 0, uid, bi2)
	{
	  insn = lra_insn_recog_data[uid]->insn;
	  if ((set = single_set (insn)) != NULL_RTX)
	    {
	      src = SET_SRC (set);
	      dest = SET_DEST (set);
	      /* A move between the optional reload pseudo and its
		 original pseudo is the reload insn itself: delete it.  */
	      if (REG_P (src) && REG_P (dest)
		  && ((REGNO (src) == regno
		       && (REGNO (lra_reg_info[regno].restore_rtx)
			   == REGNO (dest)))
		      || (REGNO (dest) == regno
			  && (REGNO (lra_reg_info[regno].restore_rtx)
			      == REGNO (src)))))
		{
		  if (lra_dump_file != NULL)
		    {
		      fprintf (lra_dump_file, "  Deleting move %u\n",
			       INSN_UID (insn));
		      dump_insn_slim (lra_dump_file, insn);
		    }
		  delete_move_and_clobber (insn, REGNO (dest));
		  continue;
		}
	      /* We should not worry about generation memory-memory
		 moves here as if the corresponding inheritance did
		 not work (inheritance pseudo did not get a hard reg),
		 we remove the inheritance pseudo and the optional
		 reload.  */
	    }
	  lra_substitute_pseudo_within_insn
	    (insn, regno, lra_reg_info[regno].restore_rtx, false);
	  lra_update_insn_regno_info (insn);
	  if (lra_dump_file != NULL)
	    {
	      fprintf (lra_dump_file,
		       "  Restoring original insn:\n");
	      dump_insn_slim (lra_dump_file, insn);
	    }
	}
    }
  /* Clear restore_regnos.  */
  EXECUTE_IF_SET_IN_BITMAP (&lra_optional_reload_pseudos, 0, regno, bi)
    lra_reg_info[regno].restore_rtx = NULL_RTX;
  return change_p;
}
7123
55a2c322
VM
7124/* Entry function for undoing inheritance/split transformation. Return true
7125 if we did any RTL change in this pass. */
7126bool
7127lra_undo_inheritance (void)
7128{
7129 unsigned int regno;
8a8330b7 7130 int hard_regno;
55a2c322 7131 int n_all_inherit, n_inherit, n_all_split, n_split;
8a8330b7 7132 rtx restore_rtx;
55a2c322
VM
7133 bitmap_iterator bi;
7134 bool change_p;
7135
7136 lra_undo_inheritance_iter++;
8e3a4869 7137 if (lra_undo_inheritance_iter > LRA_MAX_INHERITANCE_PASSES)
e731262b 7138 return false;
55a2c322
VM
7139 if (lra_dump_file != NULL)
7140 fprintf (lra_dump_file,
7141 "\n********** Undoing inheritance #%d: **********\n\n",
7142 lra_undo_inheritance_iter);
d648b5ff 7143 auto_bitmap remove_pseudos (&reg_obstack);
55a2c322
VM
7144 n_inherit = n_all_inherit = 0;
7145 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8a8330b7 7146 if (lra_reg_info[regno].restore_rtx != NULL_RTX)
55a2c322
VM
7147 {
7148 n_all_inherit++;
b0681c9e
VM
7149 if (reg_renumber[regno] < 0
7150 /* If the original pseudo changed its allocation, just
7151 removing inheritance is dangerous as for changing
7152 allocation we used shorter live-ranges. */
8a8330b7
VM
7153 && (! REG_P (lra_reg_info[regno].restore_rtx)
7154 || reg_renumber[REGNO (lra_reg_info[regno].restore_rtx)] < 0))
d648b5ff 7155 bitmap_set_bit (remove_pseudos, regno);
55a2c322
VM
7156 else
7157 n_inherit++;
7158 }
7159 if (lra_dump_file != NULL && n_all_inherit != 0)
7160 fprintf (lra_dump_file, "Inherit %d out of %d (%.2f%%)\n",
7161 n_inherit, n_all_inherit,
7162 (double) n_inherit / n_all_inherit * 100);
7163 n_split = n_all_split = 0;
7164 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8a8330b7 7165 if ((restore_rtx = lra_reg_info[regno].restore_rtx) != NULL_RTX)
55a2c322 7166 {
8a8330b7
VM
7167 int restore_regno = REGNO (restore_rtx);
7168
55a2c322
VM
7169 n_all_split++;
7170 hard_regno = (restore_regno >= FIRST_PSEUDO_REGISTER
7171 ? reg_renumber[restore_regno] : restore_regno);
7172 if (hard_regno < 0 || reg_renumber[regno] == hard_regno)
d648b5ff 7173 bitmap_set_bit (remove_pseudos, regno);
55a2c322
VM
7174 else
7175 {
7176 n_split++;
7177 if (lra_dump_file != NULL)
7178 fprintf (lra_dump_file, " Keep split r%d (orig=r%d)\n",
7179 regno, restore_regno);
7180 }
7181 }
7182 if (lra_dump_file != NULL && n_all_split != 0)
7183 fprintf (lra_dump_file, "Split %d out of %d (%.2f%%)\n",
7184 n_split, n_all_split,
7185 (double) n_split / n_all_split * 100);
d648b5ff 7186 change_p = remove_inheritance_pseudos (remove_pseudos);
55a2c322
VM
7187 /* Clear restore_regnos. */
7188 EXECUTE_IF_SET_IN_BITMAP (&lra_inheritance_pseudos, 0, regno, bi)
8a8330b7 7189 lra_reg_info[regno].restore_rtx = NULL_RTX;
55a2c322 7190 EXECUTE_IF_SET_IN_BITMAP (&lra_split_regs, 0, regno, bi)
8a8330b7 7191 lra_reg_info[regno].restore_rtx = NULL_RTX;
2b778c9d 7192 change_p = undo_optional_reloads () || change_p;
55a2c322
VM
7193 return change_p;
7194}