/* gcc/reload1.c — gitweb page header removed; original navigation text:
   "git.ipfire.org Git - thirdparty/gcc.git - gcc/reload1.c
    always define AUTO_INC_DEC".  */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "df.h"
27
28 #include "rtl-error.h"
29 #include "tm_p.h"
30 #include "obstack.h"
31 #include "insn-config.h"
32 #include "flags.h"
33 #include "alias.h"
34 #include "expmed.h"
35 #include "dojump.h"
36 #include "explow.h"
37 #include "calls.h"
38 #include "emit-rtl.h"
39 #include "varasm.h"
40 #include "stmt.h"
41 #include "expr.h"
42 #include "insn-codes.h"
43 #include "optabs.h"
44 #include "regs.h"
45 #include "addresses.h"
46 #include "cfgrtl.h"
47 #include "cfgbuild.h"
48 #include "reload.h"
49 #include "recog.h"
50 #include "except.h"
51 #include "ira.h"
52 #include "target.h"
53 #include "dumpfile.h"
54 #include "rtl-iter.h"
55
56 /* This file contains the reload pass of the compiler, which is
57 run after register allocation has been done. It checks that
58 each insn is valid (operands required to be in registers really
59 are in registers of the proper class) and fixes up invalid ones
60 by copying values temporarily into registers for the insns
61 that need them.
62
63 The results of register allocation are described by the vector
64 reg_renumber; the insns still contain pseudo regs, but reg_renumber
65 can be used to find which hard reg, if any, a pseudo reg is in.
66
67 The technique we always use is to free up a few hard regs that are
68 called ``reload regs'', and for each place where a pseudo reg
69 must be in a hard reg, copy it temporarily into one of the reload regs.
70
71 Reload regs are allocated locally for every instruction that needs
72 reloads. When there are pseudos which are allocated to a register that
73 has been chosen as a reload reg, such pseudos must be ``spilled''.
74 This means that they go to other hard regs, or to stack slots if no other
75 available hard regs can be found. Spilling can invalidate more
76 insns, requiring additional need for reloads, so we must keep checking
77 until the process stabilizes.
78
79 For machines with different classes of registers, we must keep track
80 of the register class needed for each reload, and make sure that
81 we allocate enough reload registers of each class.
82
83 The file reload.c contains the code that checks one insn for
84 validity and reports the reloads that it needs. This file
85 is in charge of scanning the entire rtl code, accumulating the
86 reload needs, spilling, assigning reload registers to use for
87 fixing up each insn, and generating the new insns to copy values
88 into the reload registers. */
89 \f
90 struct target_reload default_target_reload;
91 #if SWITCHABLE_TARGET
92 struct target_reload *this_target_reload = &default_target_reload;
93 #endif
94
95 #define spill_indirect_levels \
96 (this_target_reload->x_spill_indirect_levels)
97
98 /* During reload_as_needed, element N contains a REG rtx for the hard reg
99 into which reg N has been reloaded (perhaps for a previous insn). */
100 static rtx *reg_last_reload_reg;
101
102 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
103 for an output reload that stores into reg N. */
104 static regset_head reg_has_output_reload;
105
106 /* Indicates which hard regs are reload-registers for an output reload
107 in the current insn. */
108 static HARD_REG_SET reg_is_output_reload;
109
110 /* Widest width in which each pseudo reg is referred to (via subreg). */
111 static unsigned int *reg_max_ref_width;
112
113 /* Vector to remember old contents of reg_renumber before spilling. */
114 static short *reg_old_renumber;
115
116 /* During reload_as_needed, element N contains the last pseudo regno reloaded
117 into hard register N. If that pseudo reg occupied more than one register,
118 reg_reloaded_contents points to that pseudo for each spill register in
119 use; all of these must remain set for an inheritance to occur. */
120 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
121
122 /* During reload_as_needed, element N contains the insn for which
123 hard register N was last used. Its contents are significant only
124 when reg_reloaded_valid is set for this register. */
125 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
126
127 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
128 static HARD_REG_SET reg_reloaded_valid;
129 /* Indicate if the register was dead at the end of the reload.
130 This is only valid if reg_reloaded_contents is set and valid. */
131 static HARD_REG_SET reg_reloaded_dead;
132
133 /* Indicate whether the register's current value is one that is not
134 safe to retain across a call, even for registers that are normally
135 call-saved. This is only meaningful for members of reg_reloaded_valid. */
136 static HARD_REG_SET reg_reloaded_call_part_clobbered;
137
138 /* Number of spill-regs so far; number of valid elements of spill_regs. */
139 static int n_spills;
140
141 /* In parallel with spill_regs, contains REG rtx's for those regs.
142 Holds the last rtx used for any given reg, or 0 if it has never
143 been used for spilling yet. This rtx is reused, provided it has
144 the proper mode. */
145 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
146
147 /* In parallel with spill_regs, contains nonzero for a spill reg
148 that was stored after the last time it was used.
149 The precise value is the insn generated to do the store. */
150 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
151
152 /* This is the register that was stored with spill_reg_store. This is a
153 copy of reload_out / reload_out_reg when the value was stored; if
154 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
155 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
156
157 /* This table is the inverse mapping of spill_regs:
158 indexed by hard reg number,
159 it contains the position of that reg in spill_regs,
160 or -1 for something that is not in spill_regs.
161
162 ?!? This is no longer accurate. */
163 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
164
165 /* This reg set indicates registers that can't be used as spill registers for
166 the currently processed insn. These are the hard registers which are live
167 during the insn, but not allocated to pseudos, as well as fixed
168 registers. */
169 static HARD_REG_SET bad_spill_regs;
170
171 /* These are the hard registers that can't be used as spill register for any
172 insn. This includes registers used for user variables and registers that
173 we can't eliminate. A register that appears in this set also can't be used
174 to retry register allocation. */
175 static HARD_REG_SET bad_spill_regs_global;
176
177 /* Describes order of use of registers for reloading
178 of spilled pseudo-registers. `n_spills' is the number of
179 elements that are actually valid; new ones are added at the end.
180
181 Both spill_regs and spill_reg_order are used on two occasions:
182 once during find_reload_regs, where they keep track of the spill registers
183 for a single insn, but also during reload_as_needed where they show all
184 the registers ever used by reload. For the latter case, the information
185 is calculated during finish_spills. */
186 static short spill_regs[FIRST_PSEUDO_REGISTER];
187
188 /* This vector of reg sets indicates, for each pseudo, which hard registers
189 may not be used for retrying global allocation because the register was
190 formerly spilled from one of them. If we allowed reallocating a pseudo to
191 a register that it was already allocated to, reload might not
192 terminate. */
193 static HARD_REG_SET *pseudo_previous_regs;
194
195 /* This vector of reg sets indicates, for each pseudo, which hard
196 registers may not be used for retrying global allocation because they
197 are used as spill registers during one of the insns in which the
198 pseudo is live. */
199 static HARD_REG_SET *pseudo_forbidden_regs;
200
201 /* All hard regs that have been used as spill registers for any insn are
202 marked in this set. */
203 static HARD_REG_SET used_spill_regs;
204
205 /* Index of last register assigned as a spill register. We allocate in
206 a round-robin fashion. */
207 static int last_spill_reg;
208
209 /* Record the stack slot for each spilled hard register. */
210 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
211
212 /* Width allocated so far for that stack slot. */
213 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
214
215 /* Record which pseudos needed to be spilled. */
216 static regset_head spilled_pseudos;
217
218 /* Record which pseudos changed their allocation in finish_spills. */
219 static regset_head changed_allocation_pseudos;
220
221 /* Used for communication between order_regs_for_reload and count_pseudo.
222 Used to avoid counting one pseudo twice. */
223 static regset_head pseudos_counted;
224
225 /* First uid used by insns created by reload in this function.
226 Used in find_equiv_reg. */
227 int reload_first_uid;
228
229 /* Flag set by local-alloc or global-alloc if anything is live in
230 a call-clobbered reg across calls. */
231 int caller_save_needed;
232
233 /* Set to 1 while reload_as_needed is operating.
234 Required by some machines to handle any generated moves differently. */
235 int reload_in_progress = 0;
236
237 /* This obstack is used for allocation of rtl during register elimination.
238 The allocated storage can be freed once find_reloads has processed the
239 insn. */
240 static struct obstack reload_obstack;
241
242 /* Points to the beginning of the reload_obstack. All insn_chain structures
243 are allocated first. */
244 static char *reload_startobj;
245
246 /* The point after all insn_chain structures. Used to quickly deallocate
247 memory allocated in copy_reloads during calculate_needs_all_insns. */
248 static char *reload_firstobj;
249
250 /* This points before all local rtl generated by register elimination.
251 Used to quickly free all memory after processing one insn. */
252 static char *reload_insn_firstobj;
253
254 /* List of insn_chain instructions, one for every insn that reload needs to
255 examine. */
256 struct insn_chain *reload_insn_chain;
257
258 /* TRUE if we potentially left dead insns in the insn stream and want to
259 run DCE immediately after reload, FALSE otherwise. */
260 static bool need_dce;
261
262 /* List of all insns needing reloads. */
263 static struct insn_chain *insns_need_reload;
264 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another (e.g. the frame pointer in favor of the stack
   pointer).  If there is more than one way of eliminating a particular
   register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  HOST_WIDE_INT initial_offset;	/* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  HOST_WIDE_INT offset;		/* Current offset between the two regs.  */
  HOST_WIDE_INT previous_offset;/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};
289
290 static struct elim_table *reg_eliminate = 0;
291
292 /* This is an intermediate structure to initialize the table. It has
293 exactly the members provided by ELIMINABLE_REGS. */
294 static const struct elim_table_1
295 {
296 const int from;
297 const int to;
298 } reg_eliminate_1[] =
299
300 /* If a set of eliminable registers was specified, define the table from it.
301 Otherwise, default to the normal case of the frame pointer being
302 replaced by the stack pointer. */
303
304 #ifdef ELIMINABLE_REGS
305 ELIMINABLE_REGS;
306 #else
307 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
308 #endif
309
310 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
311
312 /* Record the number of pending eliminations that have an offset not equal
313 to their initial offset. If nonzero, we use a new copy of each
314 replacement result in any insns encountered. */
315 int num_not_at_initial_offset;
316
317 /* Count the number of registers that we may be able to eliminate. */
318 static int num_eliminable;
319 /* And the number of registers that are equivalent to a constant that
320 can be eliminated to frame_pointer / arg_pointer + constant. */
321 static int num_eliminable_invariants;
322
323 /* For each label, we record the offset of each elimination. If we reach
324 a label by more than one path and an offset differs, we cannot do the
325 elimination. This information is indexed by the difference of the
326 number of the label and the first label number. We can't offset the
327 pointer itself as this can cause problems on machines with segmented
328 memory. The first table is an array of flags that records whether we
329 have yet encountered a label and the second table is an array of arrays,
330 one entry in the latter array for each elimination. */
331
332 static int first_label_num;
333 static char *offsets_known_at;
334 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
335
336 vec<reg_equivs_t, va_gc> *reg_equivs;
337
338 /* Stack of addresses where an rtx has been changed. We can undo the
339 changes by popping items off the stack and restoring the original
340 value at each location.
341
342 We use this simplistic undo capability rather than copy_rtx as copy_rtx
343 will not make a deep copy of a normally sharable rtx, such as
344 (const (plus (symbol_ref) (const_int))). If such an expression appears
345 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
346 rtx expression would be changed. See PR 42431. */
347
348 typedef rtx *rtx_p;
349 static vec<rtx_p> substitute_stack;
350
351 /* Number of labels in the current function. */
352
353 static int num_labels;
354 \f
355 static void replace_pseudos_in (rtx *, machine_mode, rtx);
356 static void maybe_fix_stack_asms (void);
357 static void copy_reloads (struct insn_chain *);
358 static void calculate_needs_all_insns (int);
359 static int find_reg (struct insn_chain *, int);
360 static void find_reload_regs (struct insn_chain *);
361 static void select_reload_regs (void);
362 static void delete_caller_save_insns (void);
363
364 static void spill_failure (rtx_insn *, enum reg_class);
365 static void count_spilled_pseudo (int, int, int);
366 static void delete_dead_insn (rtx_insn *);
367 static void alter_reg (int, int, bool);
368 static void set_label_offsets (rtx, rtx_insn *, int);
369 static void check_eliminable_occurrences (rtx);
370 static void elimination_effects (rtx, machine_mode);
371 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
372 static int eliminate_regs_in_insn (rtx_insn *, int);
373 static void update_eliminable_offsets (void);
374 static void mark_not_eliminable (rtx, const_rtx, void *);
375 static void set_initial_elim_offsets (void);
376 static bool verify_initial_elim_offsets (void);
377 static void set_initial_label_offsets (void);
378 static void set_offsets_for_label (rtx_insn *);
379 static void init_eliminable_invariants (rtx_insn *, bool);
380 static void init_elim_table (void);
381 static void free_reg_equiv (void);
382 static void update_eliminables (HARD_REG_SET *);
383 static bool update_eliminables_and_spill (void);
384 static void elimination_costs_in_insn (rtx_insn *);
385 static void spill_hard_reg (unsigned int, int);
386 static int finish_spills (int);
387 static void scan_paradoxical_subregs (rtx);
388 static void count_pseudo (int);
389 static void order_regs_for_reload (struct insn_chain *);
390 static void reload_as_needed (int);
391 static void forget_old_reloads_1 (rtx, const_rtx, void *);
392 static void forget_marked_reloads (regset);
393 static int reload_reg_class_lower (const void *, const void *);
394 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
395 machine_mode);
396 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
397 machine_mode);
398 static int reload_reg_free_p (unsigned int, int, enum reload_type);
399 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
400 rtx, rtx, int, int);
401 static int free_for_value_p (int, machine_mode, int, enum reload_type,
402 rtx, rtx, int, int);
403 static int allocate_reload_reg (struct insn_chain *, int, int);
404 static int conflicts_with_override (rtx);
405 static void failed_reload (rtx_insn *, int);
406 static int set_reload_reg (int, int);
407 static void choose_reload_regs_init (struct insn_chain *, rtx *);
408 static void choose_reload_regs (struct insn_chain *);
409 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
410 rtx, int);
411 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
412 int);
413 static void do_input_reload (struct insn_chain *, struct reload *, int);
414 static void do_output_reload (struct insn_chain *, struct reload *, int);
415 static void emit_reload_insns (struct insn_chain *);
416 static void delete_output_reload (rtx_insn *, int, int, rtx);
417 static void delete_address_reloads (rtx_insn *, rtx_insn *);
418 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
419 static void inc_for_reload (rtx, rtx, rtx, int);
420 #if AUTO_INC_DEC
421 static void add_auto_inc_notes (rtx_insn *, rtx);
422 #endif
423 static void substitute (rtx *, const_rtx, rtx);
424 static bool gen_reload_chain_without_interm_reg_p (int, int);
425 static int reloads_conflict (int, int);
426 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
427 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
428 \f
429 /* Initialize the reload pass. This is called at the beginning of compilation
430 and may be called again if the target is reinitialized. */
431
432 void
433 init_reload (void)
434 {
435 int i;
436
437 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
438 Set spill_indirect_levels to the number of levels such addressing is
439 permitted, zero if it is not permitted at all. */
440
441 rtx tem
442 = gen_rtx_MEM (Pmode,
443 gen_rtx_PLUS (Pmode,
444 gen_rtx_REG (Pmode,
445 LAST_VIRTUAL_REGISTER + 1),
446 gen_int_mode (4, Pmode)));
447 spill_indirect_levels = 0;
448
449 while (memory_address_p (QImode, tem))
450 {
451 spill_indirect_levels++;
452 tem = gen_rtx_MEM (Pmode, tem);
453 }
454
455 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
456
457 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
458 indirect_symref_ok = memory_address_p (QImode, tem);
459
460 /* See if reg+reg is a valid (and offsettable) address. */
461
462 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
463 {
464 tem = gen_rtx_PLUS (Pmode,
465 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
466 gen_rtx_REG (Pmode, i));
467
468 /* This way, we make sure that reg+reg is an offsettable address. */
469 tem = plus_constant (Pmode, tem, 4);
470
471 if (memory_address_p (QImode, tem))
472 {
473 double_reg_address_ok = 1;
474 break;
475 }
476 }
477
478 /* Initialize obstack for our rtl allocation. */
479 if (reload_startobj == NULL)
480 {
481 gcc_obstack_init (&reload_obstack);
482 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
483 }
484
485 INIT_REG_SET (&spilled_pseudos);
486 INIT_REG_SET (&changed_allocation_pseudos);
487 INIT_REG_SET (&pseudos_counted);
488 }
489
490 /* List of insn chains that are currently unused. */
491 static struct insn_chain *unused_insn_chains = 0;
492
493 /* Allocate an empty insn_chain structure. */
494 struct insn_chain *
495 new_insn_chain (void)
496 {
497 struct insn_chain *c;
498
499 if (unused_insn_chains == 0)
500 {
501 c = XOBNEW (&reload_obstack, struct insn_chain);
502 INIT_REG_SET (&c->live_throughout);
503 INIT_REG_SET (&c->dead_or_set);
504 }
505 else
506 {
507 c = unused_insn_chains;
508 unused_insn_chains = c->next;
509 }
510 c->is_caller_save_insn = 0;
511 c->need_operand_change = 0;
512 c->need_reload = 0;
513 c->need_elim = 0;
514 return c;
515 }
516
517 /* Small utility function to set all regs in hard reg set TO which are
518 allocated to pseudos in regset FROM. */
519
520 void
521 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
522 {
523 unsigned int regno;
524 reg_set_iterator rsi;
525
526 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
527 {
528 int r = reg_renumber[regno];
529
530 if (r < 0)
531 {
532 /* reload_combine uses the information from DF_LIVE_IN,
533 which might still contain registers that have not
534 actually been allocated since they have an
535 equivalence. */
536 gcc_assert (ira_conflicts_p || reload_completed);
537 }
538 else
539 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
540 }
541 }
542
543 /* Replace all pseudos found in LOC with their corresponding
544 equivalences. */
545
546 static void
547 replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
548 {
549 rtx x = *loc;
550 enum rtx_code code;
551 const char *fmt;
552 int i, j;
553
554 if (! x)
555 return;
556
557 code = GET_CODE (x);
558 if (code == REG)
559 {
560 unsigned int regno = REGNO (x);
561
562 if (regno < FIRST_PSEUDO_REGISTER)
563 return;
564
565 x = eliminate_regs_1 (x, mem_mode, usage, true, false);
566 if (x != *loc)
567 {
568 *loc = x;
569 replace_pseudos_in (loc, mem_mode, usage);
570 return;
571 }
572
573 if (reg_equiv_constant (regno))
574 *loc = reg_equiv_constant (regno);
575 else if (reg_equiv_invariant (regno))
576 *loc = reg_equiv_invariant (regno);
577 else if (reg_equiv_mem (regno))
578 *loc = reg_equiv_mem (regno);
579 else if (reg_equiv_address (regno))
580 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
581 else
582 {
583 gcc_assert (!REG_P (regno_reg_rtx[regno])
584 || REGNO (regno_reg_rtx[regno]) != regno);
585 *loc = regno_reg_rtx[regno];
586 }
587
588 return;
589 }
590 else if (code == MEM)
591 {
592 replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
593 return;
594 }
595
596 /* Process each of our operands recursively. */
597 fmt = GET_RTX_FORMAT (code);
598 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
599 if (*fmt == 'e')
600 replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
601 else if (*fmt == 'E')
602 for (j = 0; j < XVECLEN (x, i); j++)
603 replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
604 }
605
606 /* Determine if the current function has an exception receiver block
607 that reaches the exit block via non-exceptional edges */
608
609 static bool
610 has_nonexceptional_receiver (void)
611 {
612 edge e;
613 edge_iterator ei;
614 basic_block *tos, *worklist, bb;
615
616 /* If we're not optimizing, then just err on the safe side. */
617 if (!optimize)
618 return true;
619
620 /* First determine which blocks can reach exit via normal paths. */
621 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
622
623 FOR_EACH_BB_FN (bb, cfun)
624 bb->flags &= ~BB_REACHABLE;
625
626 /* Place the exit block on our worklist. */
627 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
628 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
629
630 /* Iterate: find everything reachable from what we've already seen. */
631 while (tos != worklist)
632 {
633 bb = *--tos;
634
635 FOR_EACH_EDGE (e, ei, bb->preds)
636 if (!(e->flags & EDGE_ABNORMAL))
637 {
638 basic_block src = e->src;
639
640 if (!(src->flags & BB_REACHABLE))
641 {
642 src->flags |= BB_REACHABLE;
643 *tos++ = src;
644 }
645 }
646 }
647 free (worklist);
648
649 /* Now see if there's a reachable block with an exceptional incoming
650 edge. */
651 FOR_EACH_BB_FN (bb, cfun)
652 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
653 return true;
654
655 /* No exceptional block reached exit unexceptionally. */
656 return false;
657 }
658
659 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
660 zero elements) to MAX_REG_NUM elements.
661
662 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
663 void
664 grow_reg_equivs (void)
665 {
666 int old_size = vec_safe_length (reg_equivs);
667 int max_regno = max_reg_num ();
668 int i;
669 reg_equivs_t ze;
670
671 memset (&ze, 0, sizeof (reg_equivs_t));
672 vec_safe_reserve (reg_equivs, max_regno);
673 for (i = old_size; i < max_regno; i++)
674 reg_equivs->quick_insert (i, ze);
675 }
676
677 \f
678 /* Global variables used by reload and its subroutines. */
679
680 /* The current basic block while in calculate_elim_costs_all_insns. */
681 static basic_block elim_bb;
682
683 /* Set during calculate_needs if an insn needs register elimination. */
684 static int something_needs_elimination;
685 /* Set during calculate_needs if an insn needs an operand changed. */
686 static int something_needs_operands_changed;
687 /* Set by alter_regs if we spilled a register to the stack. */
688 static bool something_was_spilled;
689
690 /* Nonzero means we couldn't get enough spill regs. */
691 static int failure;
692
693 /* Temporary array of pseudo-register number. */
694 static int *temp_pseudo_reg_arr;
695
696 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
697 If that insn didn't set the register (i.e., it copied the register to
698 memory), just delete that insn instead of the equivalencing insn plus
699 anything now dead. If we call delete_dead_insn on that insn, we may
700 delete the insn that actually sets the register if the register dies
701 there and that is incorrect. */
702 static void
703 remove_init_insns ()
704 {
705 for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
706 {
707 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
708 {
709 rtx list;
710 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
711 {
712 rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
713
714 /* If we already deleted the insn or if it may trap, we can't
715 delete it. The latter case shouldn't happen, but can
716 if an insn has a variable address, gets a REG_EH_REGION
717 note added to it, and then gets converted into a load
718 from a constant address. */
719 if (NOTE_P (equiv_insn)
720 || can_throw_internal (equiv_insn))
721 ;
722 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
723 delete_dead_insn (equiv_insn);
724 else
725 SET_INSN_DELETED (equiv_insn);
726 }
727 }
728 }
729 }
730
731 /* Return true if remove_init_insns will delete INSN. */
732 static bool
733 will_delete_init_insn_p (rtx_insn *insn)
734 {
735 rtx set = single_set (insn);
736 if (!set || !REG_P (SET_DEST (set)))
737 return false;
738 unsigned regno = REGNO (SET_DEST (set));
739
740 if (can_throw_internal (insn))
741 return false;
742
743 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
744 return false;
745
746 for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
747 {
748 rtx equiv_insn = XEXP (list, 0);
749 if (equiv_insn == insn)
750 return true;
751 }
752 return false;
753 }
754
755 /* Main entry point for the reload pass.
756
757 FIRST is the first insn of the function being compiled.
758
759 GLOBAL nonzero means we were called from global_alloc
760 and should attempt to reallocate any pseudoregs that we
761 displace from hard regs we will use for reloads.
762 If GLOBAL is zero, we do not have enough information to do that,
763 so any pseudo reg that is spilled must go to the stack.
764
765 Return value is TRUE if reload likely left dead insns in the
766 stream and a DCE pass should be run to elimiante them. Else the
767 return value is FALSE. */
768
769 bool
770 reload (rtx_insn *first, int global)
771 {
772 int i, n;
773 rtx_insn *insn;
774 struct elim_table *ep;
775 basic_block bb;
776 bool inserted;
777
778 /* Make sure even insns with volatile mem refs are recognizable. */
779 init_recog ();
780
781 failure = 0;
782
783 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
784
785 /* Make sure that the last insn in the chain
786 is not something that needs reloading. */
787 emit_note (NOTE_INSN_DELETED);
788
789 /* Enable find_equiv_reg to distinguish insns made by reload. */
790 reload_first_uid = get_max_uid ();
791
792 #ifdef SECONDARY_MEMORY_NEEDED
793 /* Initialize the secondary memory table. */
794 clear_secondary_mem ();
795 #endif
796
797 /* We don't have a stack slot for any spill reg yet. */
798 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
799 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
800
801 /* Initialize the save area information for caller-save, in case some
802 are needed. */
803 init_save_areas ();
804
805 /* Compute which hard registers are now in use
806 as homes for pseudo registers.
807 This is done here rather than (eg) in global_alloc
808 because this point is reached even if not optimizing. */
809 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
810 mark_home_live (i);
811
812 /* A function that has a nonlocal label that can reach the exit
813 block via non-exceptional paths must save all call-saved
814 registers. */
815 if (cfun->has_nonlocal_label
816 && has_nonexceptional_receiver ())
817 crtl->saves_all_registers = 1;
818
819 if (crtl->saves_all_registers)
820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
821 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
822 df_set_regs_ever_live (i, true);
823
824 /* Find all the pseudo registers that didn't get hard regs
825 but do have known equivalent constants or memory slots.
826 These include parameters (known equivalent to parameter slots)
827 and cse'd or loop-moved constant memory addresses.
828
829 Record constant equivalents in reg_equiv_constant
830 so they will be substituted by find_reloads.
831 Record memory equivalents in reg_mem_equiv so they can
832 be substituted eventually by altering the REG-rtx's. */
833
834 grow_reg_equivs ();
835 reg_old_renumber = XCNEWVEC (short, max_regno);
836 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
837 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
838 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
839
840 CLEAR_HARD_REG_SET (bad_spill_regs_global);
841
842 init_eliminable_invariants (first, true);
843 init_elim_table ();
844
845 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
846 stack slots to the pseudos that lack hard regs or equivalents.
847 Do not touch virtual registers. */
848
849 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
850 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
851 temp_pseudo_reg_arr[n++] = i;
852
853 if (ira_conflicts_p)
854 /* Ask IRA to order pseudo-registers for better stack slot
855 sharing. */
856 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
857
858 for (i = 0; i < n; i++)
859 alter_reg (temp_pseudo_reg_arr[i], -1, false);
860
861 /* If we have some registers we think can be eliminated, scan all insns to
862 see if there is an insn that sets one of these registers to something
863 other than itself plus a constant. If so, the register cannot be
864 eliminated. Doing this scan here eliminates an extra pass through the
865 main reload loop in the most common case where register elimination
866 cannot be done. */
867 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
868 if (INSN_P (insn))
869 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
870
871 maybe_fix_stack_asms ();
872
873 insns_need_reload = 0;
874 something_needs_elimination = 0;
875
876 /* Initialize to -1, which means take the first spill register. */
877 last_spill_reg = -1;
878
879 /* Spill any hard regs that we know we can't eliminate. */
880 CLEAR_HARD_REG_SET (used_spill_regs);
881 /* There can be multiple ways to eliminate a register;
882 they should be listed adjacently.
883 Elimination for any register fails only if all possible ways fail. */
884 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
885 {
886 int from = ep->from;
887 int can_eliminate = 0;
888 do
889 {
890 can_eliminate |= ep->can_eliminate;
891 ep++;
892 }
893 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
894 if (! can_eliminate)
895 spill_hard_reg (from, 1);
896 }
897
898 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
899 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
900
901 finish_spills (global);
902
903 /* From now on, we may need to generate moves differently. We may also
904 allow modifications of insns which cause them to not be recognized.
905 Any such modifications will be cleaned up during reload itself. */
906 reload_in_progress = 1;
907
908 /* This loop scans the entire function each go-round
909 and repeats until one repetition spills no additional hard regs. */
910 for (;;)
911 {
912 int something_changed;
913 int did_spill;
914 HOST_WIDE_INT starting_frame_size;
915
916 starting_frame_size = get_frame_size ();
917 something_was_spilled = false;
918
919 set_initial_elim_offsets ();
920 set_initial_label_offsets ();
921
922 /* For each pseudo register that has an equivalent location defined,
923 try to eliminate any eliminable registers (such as the frame pointer)
924 assuming initial offsets for the replacement register, which
925 is the normal case.
926
927 If the resulting location is directly addressable, substitute
928 the MEM we just got directly for the old REG.
929
930 If it is not addressable but is a constant or the sum of a hard reg
931 and constant, it is probably not addressable because the constant is
932 out of range, in that case record the address; we will generate
933 hairy code to compute the address in a register each time it is
934 needed. Similarly if it is a hard register, but one that is not
935 valid as an address register.
936
937 If the location is not addressable, but does not have one of the
938 above forms, assign a stack slot. We have to do this to avoid the
939 potential of producing lots of reloads if, e.g., a location involves
940 a pseudo that didn't get a hard register and has an equivalent memory
941 location that also involves a pseudo that didn't get a hard register.
942
943 Perhaps at some point we will improve reload_when_needed handling
944 so this problem goes away. But that's very hairy. */
945
946 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
947 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
948 {
949 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
950 NULL_RTX);
951
952 if (strict_memory_address_addr_space_p
953 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
954 MEM_ADDR_SPACE (x)))
955 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
956 else if (CONSTANT_P (XEXP (x, 0))
957 || (REG_P (XEXP (x, 0))
958 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
959 || (GET_CODE (XEXP (x, 0)) == PLUS
960 && REG_P (XEXP (XEXP (x, 0), 0))
961 && (REGNO (XEXP (XEXP (x, 0), 0))
962 < FIRST_PSEUDO_REGISTER)
963 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
964 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
965 else
966 {
967 /* Make a new stack slot. Then indicate that something
968 changed so we go back and recompute offsets for
969 eliminable registers because the allocation of memory
970 below might change some offset. reg_equiv_{mem,address}
971 will be set up for this pseudo on the next pass around
972 the loop. */
973 reg_equiv_memory_loc (i) = 0;
974 reg_equiv_init (i) = 0;
975 alter_reg (i, -1, true);
976 }
977 }
978
979 if (caller_save_needed)
980 setup_save_areas ();
981
982 if (starting_frame_size && crtl->stack_alignment_needed)
983 {
984 /* If we have a stack frame, we must align it now. The
985 stack size may be a part of the offset computation for
986 register elimination. So if this changes the stack size,
987 then repeat the elimination bookkeeping. We don't
988 realign when there is no stack, as that will cause a
989 stack frame when none is needed should
990 STARTING_FRAME_OFFSET not be already aligned to
991 STACK_BOUNDARY. */
992 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
993 }
994 /* If we allocated another stack slot, redo elimination bookkeeping. */
995 if (something_was_spilled || starting_frame_size != get_frame_size ())
996 {
997 update_eliminables_and_spill ();
998 continue;
999 }
1000
1001 if (caller_save_needed)
1002 {
1003 save_call_clobbered_regs ();
1004 /* That might have allocated new insn_chain structures. */
1005 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1006 }
1007
1008 calculate_needs_all_insns (global);
1009
1010 if (! ira_conflicts_p)
1011 /* Don't do it for IRA. We need this info because we don't
1012 change live_throughout and dead_or_set for chains when IRA
1013 is used. */
1014 CLEAR_REG_SET (&spilled_pseudos);
1015
1016 did_spill = 0;
1017
1018 something_changed = 0;
1019
1020 /* If we allocated any new memory locations, make another pass
1021 since it might have changed elimination offsets. */
1022 if (something_was_spilled || starting_frame_size != get_frame_size ())
1023 something_changed = 1;
1024
1025 /* Even if the frame size remained the same, we might still have
1026 changed elimination offsets, e.g. if find_reloads called
1027 force_const_mem requiring the back end to allocate a constant
1028 pool base register that needs to be saved on the stack. */
1029 else if (!verify_initial_elim_offsets ())
1030 something_changed = 1;
1031
1032 if (update_eliminables_and_spill ())
1033 {
1034 did_spill = 1;
1035 something_changed = 1;
1036 }
1037
1038 select_reload_regs ();
1039 if (failure)
1040 goto failed;
1041
1042 if (insns_need_reload != 0 || did_spill)
1043 something_changed |= finish_spills (global);
1044
1045 if (! something_changed)
1046 break;
1047
1048 if (caller_save_needed)
1049 delete_caller_save_insns ();
1050
1051 obstack_free (&reload_obstack, reload_firstobj);
1052 }
1053
1054 /* If global-alloc was run, notify it of any register eliminations we have
1055 done. */
1056 if (global)
1057 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1058 if (ep->can_eliminate)
1059 mark_elimination (ep->from, ep->to);
1060
1061 remove_init_insns ();
1062
1063 /* Use the reload registers where necessary
1064 by generating move instructions to move the must-be-register
1065 values into or out of the reload registers. */
1066
1067 if (insns_need_reload != 0 || something_needs_elimination
1068 || something_needs_operands_changed)
1069 {
1070 HOST_WIDE_INT old_frame_size = get_frame_size ();
1071
1072 reload_as_needed (global);
1073
1074 gcc_assert (old_frame_size == get_frame_size ());
1075
1076 gcc_assert (verify_initial_elim_offsets ());
1077 }
1078
1079 /* If we were able to eliminate the frame pointer, show that it is no
1080 longer live at the start of any basic block. If it ls live by
1081 virtue of being in a pseudo, that pseudo will be marked live
1082 and hence the frame pointer will be known to be live via that
1083 pseudo. */
1084
1085 if (! frame_pointer_needed)
1086 FOR_EACH_BB_FN (bb, cfun)
1087 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1088
1089 /* Come here (with failure set nonzero) if we can't get enough spill
1090 regs. */
1091 failed:
1092
1093 CLEAR_REG_SET (&changed_allocation_pseudos);
1094 CLEAR_REG_SET (&spilled_pseudos);
1095 reload_in_progress = 0;
1096
1097 /* Now eliminate all pseudo regs by modifying them into
1098 their equivalent memory references.
1099 The REG-rtx's for the pseudos are modified in place,
1100 so all insns that used to refer to them now refer to memory.
1101
1102 For a reg that has a reg_equiv_address, all those insns
1103 were changed by reloading so that no insns refer to it any longer;
1104 but the DECL_RTL of a variable decl may refer to it,
1105 and if so this causes the debugging info to mention the variable. */
1106
1107 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1108 {
1109 rtx addr = 0;
1110
1111 if (reg_equiv_mem (i))
1112 addr = XEXP (reg_equiv_mem (i), 0);
1113
1114 if (reg_equiv_address (i))
1115 addr = reg_equiv_address (i);
1116
1117 if (addr)
1118 {
1119 if (reg_renumber[i] < 0)
1120 {
1121 rtx reg = regno_reg_rtx[i];
1122
1123 REG_USERVAR_P (reg) = 0;
1124 PUT_CODE (reg, MEM);
1125 XEXP (reg, 0) = addr;
1126 if (reg_equiv_memory_loc (i))
1127 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1128 else
1129 MEM_ATTRS (reg) = 0;
1130 MEM_NOTRAP_P (reg) = 1;
1131 }
1132 else if (reg_equiv_mem (i))
1133 XEXP (reg_equiv_mem (i), 0) = addr;
1134 }
1135
1136 /* We don't want complex addressing modes in debug insns
1137 if simpler ones will do, so delegitimize equivalences
1138 in debug insns. */
1139 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1140 {
1141 rtx reg = regno_reg_rtx[i];
1142 rtx equiv = 0;
1143 df_ref use, next;
1144
1145 if (reg_equiv_constant (i))
1146 equiv = reg_equiv_constant (i);
1147 else if (reg_equiv_invariant (i))
1148 equiv = reg_equiv_invariant (i);
1149 else if (reg && MEM_P (reg))
1150 equiv = targetm.delegitimize_address (reg);
1151 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1152 equiv = reg;
1153
1154 if (equiv == reg)
1155 continue;
1156
1157 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1158 {
1159 insn = DF_REF_INSN (use);
1160
1161 /* Make sure the next ref is for a different instruction,
1162 so that we're not affected by the rescan. */
1163 next = DF_REF_NEXT_REG (use);
1164 while (next && DF_REF_INSN (next) == insn)
1165 next = DF_REF_NEXT_REG (next);
1166
1167 if (DEBUG_INSN_P (insn))
1168 {
1169 if (!equiv)
1170 {
1171 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1172 df_insn_rescan_debug_internal (insn);
1173 }
1174 else
1175 INSN_VAR_LOCATION_LOC (insn)
1176 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1177 reg, equiv);
1178 }
1179 }
1180 }
1181 }
1182
1183 /* We must set reload_completed now since the cleanup_subreg_operands call
1184 below will re-recognize each insn and reload may have generated insns
1185 which are only valid during and after reload. */
1186 reload_completed = 1;
1187
1188 /* Make a pass over all the insns and delete all USEs which we inserted
1189 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1190 notes. Delete all CLOBBER insns, except those that refer to the return
1191 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1192 from misarranging variable-array code, and simplify (subreg (reg))
1193 operands. Strip and regenerate REG_INC notes that may have been moved
1194 around. */
1195
1196 for (insn = first; insn; insn = NEXT_INSN (insn))
1197 if (INSN_P (insn))
1198 {
1199 rtx *pnote;
1200
1201 if (CALL_P (insn))
1202 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1203 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1204
1205 if ((GET_CODE (PATTERN (insn)) == USE
1206 /* We mark with QImode USEs introduced by reload itself. */
1207 && (GET_MODE (insn) == QImode
1208 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1209 || (GET_CODE (PATTERN (insn)) == CLOBBER
1210 && (!MEM_P (XEXP (PATTERN (insn), 0))
1211 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1212 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1213 && XEXP (XEXP (PATTERN (insn), 0), 0)
1214 != stack_pointer_rtx))
1215 && (!REG_P (XEXP (PATTERN (insn), 0))
1216 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1217 {
1218 delete_insn (insn);
1219 continue;
1220 }
1221
1222 /* Some CLOBBERs may survive until here and still reference unassigned
1223 pseudos with const equivalent, which may in turn cause ICE in later
1224 passes if the reference remains in place. */
1225 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1226 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1227 VOIDmode, PATTERN (insn));
1228
1229 /* Discard obvious no-ops, even without -O. This optimization
1230 is fast and doesn't interfere with debugging. */
1231 if (NONJUMP_INSN_P (insn)
1232 && GET_CODE (PATTERN (insn)) == SET
1233 && REG_P (SET_SRC (PATTERN (insn)))
1234 && REG_P (SET_DEST (PATTERN (insn)))
1235 && (REGNO (SET_SRC (PATTERN (insn)))
1236 == REGNO (SET_DEST (PATTERN (insn)))))
1237 {
1238 delete_insn (insn);
1239 continue;
1240 }
1241
1242 pnote = &REG_NOTES (insn);
1243 while (*pnote != 0)
1244 {
1245 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1246 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1247 || REG_NOTE_KIND (*pnote) == REG_INC)
1248 *pnote = XEXP (*pnote, 1);
1249 else
1250 pnote = &XEXP (*pnote, 1);
1251 }
1252
1253 #if AUTO_INC_DEC
1254 add_auto_inc_notes (insn, PATTERN (insn));
1255 #endif
1256
1257 /* Simplify (subreg (reg)) if it appears as an operand. */
1258 cleanup_subreg_operands (insn);
1259
1260 /* Clean up invalid ASMs so that they don't confuse later passes.
1261 See PR 21299. */
1262 if (asm_noperands (PATTERN (insn)) >= 0)
1263 {
1264 extract_insn (insn);
1265 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1266 {
1267 error_for_asm (insn,
1268 "%<asm%> operand has impossible constraints");
1269 delete_insn (insn);
1270 continue;
1271 }
1272 }
1273 }
1274
1275 /* If we are doing generic stack checking, give a warning if this
1276 function's frame size is larger than we expect. */
1277 if (flag_stack_check == GENERIC_STACK_CHECK)
1278 {
1279 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1280 static int verbose_warned = 0;
1281
1282 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1283 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1284 size += UNITS_PER_WORD;
1285
1286 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1287 {
1288 warning (0, "frame size too large for reliable stack checking");
1289 if (! verbose_warned)
1290 {
1291 warning (0, "try reducing the number of local variables");
1292 verbose_warned = 1;
1293 }
1294 }
1295 }
1296
1297 free (temp_pseudo_reg_arr);
1298
1299 /* Indicate that we no longer have known memory locations or constants. */
1300 free_reg_equiv ();
1301
1302 free (reg_max_ref_width);
1303 free (reg_old_renumber);
1304 free (pseudo_previous_regs);
1305 free (pseudo_forbidden_regs);
1306
1307 CLEAR_HARD_REG_SET (used_spill_regs);
1308 for (i = 0; i < n_spills; i++)
1309 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1310
1311 /* Free all the insn_chain structures at once. */
1312 obstack_free (&reload_obstack, reload_startobj);
1313 unused_insn_chains = 0;
1314
1315 inserted = fixup_abnormal_edges ();
1316
1317 /* We've possibly turned single trapping insn into multiple ones. */
1318 if (cfun->can_throw_non_call_exceptions)
1319 {
1320 sbitmap blocks;
1321 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
1322 bitmap_ones (blocks);
1323 find_many_sub_basic_blocks (blocks);
1324 sbitmap_free (blocks);
1325 }
1326
1327 if (inserted)
1328 commit_edge_insertions ();
1329
1330 /* Replacing pseudos with their memory equivalents might have
1331 created shared rtx. Subsequent passes would get confused
1332 by this, so unshare everything here. */
1333 unshare_all_rtl_again (first);
1334
1335 #ifdef STACK_BOUNDARY
1336 /* init_emit has set the alignment of the hard frame pointer
1337 to STACK_BOUNDARY. It is very likely no longer valid if
1338 the hard frame pointer was used for register allocation. */
1339 if (!frame_pointer_needed)
1340 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1341 #endif
1342
1343 substitute_stack.release ();
1344
1345 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1346
1347 reload_completed = !failure;
1348
1349 return need_dce;
1350 }
1351
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  /* Walk every insn in the reload chain, looking for asms.  */
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns are of interest; asm_noperands < 0 means
	 "not an asm".  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      /* Clobbers appear only as additional elements of a PARALLEL.  */
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  /* Scan the constraint string one alternative at a time.  */
	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' disparages the rest of this alternative;
		     skip ahead to the next one.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Any other letter is resolved through the generic
		     constraint machinery; address constraints allow the
		     base-register class.  */
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Constraints may be multi-character; advance by the
		 full length of this one.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1458 \f
1459 /* Copy the global variables n_reloads and rld into the corresponding elts
1460 of CHAIN. */
1461 static void
1462 copy_reloads (struct insn_chain *chain)
1463 {
1464 chain->n_reloads = n_reloads;
1465 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1466 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1467 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1468 }
1469
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  /* Tail pointer used to build the insns_need_reload list in order.  */
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack so per-insn scratch allocations can be freed.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  /* NEXT is cached up front because the current CHAIN may be unlinked
     and recycled below when a no-op move is deleted.  */
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the pattern/code/notes so any elimination done just to
	     probe for reload needs can be undone below.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      /* Two no-op forms: a pseudo copied to itself, or a copy
		 between two unallocated pseudos sharing the same memory
		 equivalence.  */
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  /* Recycle the chain node on the free list.  */
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      /* Record the reloads and append CHAIN to the
		 insns_need_reload list.  */
	      copy_reloads (chain);
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1593 \f
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  /* Per-pseudo cost of its equivalence-initializing insn, indexed by
     register number; reported to IRA at the end.  */
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx_insn_list *init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Estimate what the initializing SET would cost after
			 register elimination, weighted by the block's
			 execution frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      machine_mode mode = GET_MODE (SET_DEST (set));
		      int cost = set_src_cost (t, mode,
					       optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Feed the accumulated costs back to IRA.  A pseudo whose equivalence
     survives (has an init insn) gets its computed gain; one whose
     equivalence cannot be eliminated gets cost 0.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* Release the per-pass tables; the label-offset arrays are owned by
     this pass and must be cleared for the next invocation.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1691 \f
1692 /* Comparison function for qsort to decide which of two reloads
1693 should be handled first. *P1 and *P2 are the reload numbers. */
1694
1695 static int
1696 reload_reg_class_lower (const void *r1p, const void *r2p)
1697 {
1698 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1699 int t;
1700
1701 /* Consider required reloads before optional ones. */
1702 t = rld[r1].optional - rld[r2].optional;
1703 if (t != 0)
1704 return t;
1705
1706 /* Count all solitary classes before non-solitary ones. */
1707 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1708 - (reg_class_size[(int) rld[r1].rclass] == 1));
1709 if (t != 0)
1710 return t;
1711
1712 /* Aside from solitaires, consider all multi-reg groups first. */
1713 t = rld[r2].nregs - rld[r1].nregs;
1714 if (t != 0)
1715 return t;
1716
1717 /* Consider reloads in order of increasing reg-class number. */
1718 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1719 if (t != 0)
1720 return t;
1721
1722 /* If reloads are equally urgent, sort by reload number,
1723 so that the results of qsort leave nothing to chance. */
1724 return r1 - r2;
1725 }
1726 \f
/* The cost of spilling each hard reg, accumulated over the pseudos
   currently assigned to it (see count_pseudo).  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is accumulated only on the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg, or -1 if it is unoccupied (see order_regs_for_reload).  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1739 /* Update the spill cost arrays, considering that pseudo REG is live. */
1740
1741 static void
1742 count_pseudo (int reg)
1743 {
1744 int freq = REG_FREQ (reg);
1745 int r = reg_renumber[reg];
1746 int nregs;
1747
1748 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1749 if (ira_conflicts_p && r < 0)
1750 return;
1751
1752 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1753 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1754 return;
1755
1756 SET_REGNO_REG_SET (&pseudos_counted, reg);
1757
1758 gcc_assert (r >= 0);
1759
1760 spill_add_cost[r] += freq;
1761 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1762 while (nregs-- > 0)
1763 {
1764 hard_regno_to_pseudo_regno[r + nregs] = reg;
1765 spill_cost[r + nregs] += freq;
1766 }
1767 }
1768
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Start with the fixed registers, which can never be spill regs.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost arrays and the occupancy map.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* Accumulate costs for every pseudo live through or set/dead in
     this insn; pseudos_counted keeps each from being counted twice.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1812 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed (filled by sorting with reload_reg_class_lower).  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
1819
1820 /* We decided to spill hard register SPILLED, which has a size of
1821 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1822 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1823 update SPILL_COST/SPILL_ADD_COST. */
1824
1825 static void
1826 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1827 {
1828 int freq = REG_FREQ (reg);
1829 int r = reg_renumber[reg];
1830 int nregs;
1831
1832 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1833 if (ira_conflicts_p && r < 0)
1834 return;
1835
1836 gcc_assert (r >= 0);
1837
1838 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1839
1840 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1841 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1842 return;
1843
1844 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1845
1846 spill_add_cost[r] -= freq;
1847 while (nregs-- > 0)
1848 {
1849 hard_regno_to_pseudo_regno[r + nregs] = -1;
1850 spill_cost[r + nregs] -= freq;
1851 }
1852 }
1853
/* Find reload register to use for reload number ORDER.  CHAIN describes
   the insn being reloaded.  On success, record the chosen register (and
   its register count) in the reload descriptor, mark the registers in
   used_spill_regs_local, update the spill-cost bookkeeping for pseudos
   that lose their home, and return 1.  Return 0 if no suitable register
   could be found.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  HARD_REG_SET not_usable;
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A hard register is unusable if it is bad for spilling (locally in
     this insn or globally), or if it lies outside the reload's class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the registers already taken by earlier reloads of this insn
     that conflict with reload RNUM.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan the hard registers (in allocation order when one is defined)
     looking for the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register value needs every register of the group to
	     be available; accumulate the additional cost of each one.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* The pseudo list is terminated by -1.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the pseudo list of the new best candidate,
		     including its -1 terminator.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      continue;
	    }

	  /* Slightly prefer a register that already appears as the
	     reload's input or output value.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Account for every pseudo live through or set/dead in this insn whose
     hard registers overlap the registers we just chose.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  /* After count_spilled_pseudo the chosen registers must carry no
     residual cost or pseudo association; record them as spill regs
     used for this insn.  */
  for (i = 0; i < rl->nregs; i++)
    {
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1994
/* Find more reload regs to satisfy the remaining need of an insn, which
   is given by CHAIN.
   Do it by ascending class number, since otherwise a reg
   might be spilled for a big class and might fail to count
   for a smaller class even though it belongs to that class.
   On failure, report the problem, set the global `failure' flag,
   and return early.  */

static void
find_reload_regs (struct insn_chain *chain)
{
  int i;

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.  */
  for (i = 0; i < chain->n_reloads; i++)
    {
      /* Show whether this reload already has a hard reg.  */
      if (chain->rld[i].reg_rtx)
	{
	  int regno = REGNO (chain->rld[i].reg_rtx);
	  chain->rld[i].regno = regno;
	  chain->rld[i].nregs
	    = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
	}
      else
	chain->rld[i].regno = -1;
      reload_order[i] = i;
    }

  /* Work on a copy of this insn's reloads in the global RLD array,
     which find_reg and the reload helpers operate on.  */
  n_reloads = chain->n_reloads;
  memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));

  CLEAR_HARD_REG_SET (used_spill_regs_local);

  if (dump_file)
    fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Compute the order of preference for hard registers to spill.  */

  order_regs_for_reload (chain);

  for (i = 0; i < n_reloads; i++)
    {
      int r = reload_order[i];

      /* Ignore reloads that got marked inoperative.  */
      if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
	  && ! rld[r].optional
	  && rld[r].regno == -1)
	if (! find_reg (chain, i))
	  {
	    if (dump_file)
	      fprintf (dump_file, "reload failure for reload %d\n", r);
	    spill_failure (chain->insn, rld[r].rclass);
	    failure = 1;
	    return;
	  }
    }

  /* Record the spill regs this insn used, both in the chain and in the
     pass-wide set, and copy the updated reload descriptors back.  */
  COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
  IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);

  memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
}
2062
2063 static void
2064 select_reload_regs (void)
2065 {
2066 struct insn_chain *chain;
2067
2068 /* Try to satisfy the needs for each insn. */
2069 for (chain = insns_need_reload; chain != 0;
2070 chain = chain->next_need_reload)
2071 find_reload_regs (chain);
2072 }
2073 \f
2074 /* Delete all insns that were inserted by emit_caller_save_insns during
2075 this iteration. */
2076 static void
2077 delete_caller_save_insns (void)
2078 {
2079 struct insn_chain *c = reload_insn_chain;
2080
2081 while (c != 0)
2082 {
2083 while (c != 0 && c->is_caller_save_insn)
2084 {
2085 struct insn_chain *next = c->next;
2086 rtx_insn *insn = c->insn;
2087
2088 if (c == reload_insn_chain)
2089 reload_insn_chain = next;
2090 delete_insn (insn);
2091
2092 if (next)
2093 next->prev = c->prev;
2094 if (c->prev)
2095 c->prev->next = next;
2096 c->next = unused_insn_chains;
2097 unused_insn_chains = c;
2098 c = next;
2099 }
2100 if (c != 0)
2101 c = c->next;
2102 }
2103 }
2104 \f
2105 /* Handle the failure to find a register to spill.
2106 INSN should be one of the insns which needed this particular spill reg. */
2107
2108 static void
2109 spill_failure (rtx_insn *insn, enum reg_class rclass)
2110 {
2111 if (asm_noperands (PATTERN (insn)) >= 0)
2112 error_for_asm (insn, "can%'t find a register in class %qs while "
2113 "reloading %<asm%>",
2114 reg_class_names[rclass]);
2115 else
2116 {
2117 error ("unable to find a register to spill in class %qs",
2118 reg_class_names[rclass]);
2119
2120 if (dump_file)
2121 {
2122 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2123 debug_reload_to_stream (dump_file);
2124 }
2125 fatal_insn ("this is the insn:", insn);
2126 }
2127 }
2128 \f
2129 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2130 data that is dead in INSN. */
2131
2132 static void
2133 delete_dead_insn (rtx_insn *insn)
2134 {
2135 rtx_insn *prev = prev_active_insn (insn);
2136 rtx prev_dest;
2137
2138 /* If the previous insn sets a register that dies in our insn make
2139 a note that we want to run DCE immediately after reload.
2140
2141 We used to delete the previous insn & recurse, but that's wrong for
2142 block local equivalences. Instead of trying to figure out the exact
2143 circumstances where we can delete the potentially dead insns, just
2144 let DCE do the job. */
2145 if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2146 && GET_CODE (PATTERN (prev)) == SET
2147 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2148 && reg_mentioned_p (prev_dest, PATTERN (insn))
2149 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2150 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2151 need_dce = 1;
2152
2153 SET_INSN_DELETED (insn);
2154 }
2155
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P nonzero suppresses the IRA-based reuse and registration
   of stack slots for this pseudo.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* If IRA handed us an existing slot to reuse, X is set and no
	 further allocation is needed.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocation a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow MODE, TOTAL_SIZE and MIN_ALIGN so the new slot also
	     satisfies every previous user of FROM_REG's slot.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2317
2318 /* Mark the slots in regs_ever_live for the hard regs used by
2319 pseudo-reg number REGNO, accessed in MODE. */
2320
2321 static void
2322 mark_home_live_1 (int regno, machine_mode mode)
2323 {
2324 int i, lim;
2325
2326 i = reg_renumber[regno];
2327 if (i < 0)
2328 return;
2329 lim = end_hard_regno (mode, i);
2330 while (i < lim)
2331 df_set_regs_ever_live (i++, true);
2332 }
2333
2334 /* Mark the slots in regs_ever_live for the hard regs
2335 used by pseudo-reg number REGNO. */
2336
2337 void
2338 mark_home_live (int regno)
2339 {
2340 if (reg_renumber[regno] >= 0)
2341 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2342 }
2343 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   May disable eliminations (by clearing can_eliminate) whose offsets
   cannot be proven consistent at a label.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      /* Work on the CODE_LABEL the reference points to.  */
      x = LABEL_REF_LABEL (x);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* Process the label vector via the insn's pattern.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      /* First process the jump's pattern, then fall through so any
	 REG_LABEL_OPERAND notes are handled as well.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a non-label, non-PC, non-RETURN arm breaks
	     out to the "variable address" handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2496 \f
2497 /* This function examines every reg that occurs in X and adjusts the
2498 costs for its elimination which are gathered by IRA. INSN is the
2499 insn in which X occurs. We do not recurse into MEM expressions. */
2500
2501 static void
2502 note_reg_elim_costly (const_rtx x, rtx insn)
2503 {
2504 subrtx_iterator::array_type array;
2505 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2506 {
2507 const_rtx x = *iter;
2508 if (MEM_P (x))
2509 iter.skip_subrtxes ();
2510 else if (REG_P (x)
2511 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2512 && reg_equiv_init (REGNO (x))
2513 && reg_equiv_invariant (REGNO (x)))
2514 {
2515 rtx t = reg_equiv_invariant (REGNO (x));
2516 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2517 int cost = set_src_cost (new_rtx, Pmode,
2518 optimize_bb_for_speed_p (elim_bb));
2519 int freq = REG_FREQ_FROM_BB (elim_bb);
2520
2521 if (cost != 0)
2522 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2523 }
2524 }
2525 }
2526
2527 /* Scan X and replace any eliminable registers (such as fp) with a
2528 replacement (such as sp), plus an offset.
2529
2530 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2531 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2532 MEM, we are allowed to replace a sum of a register and the constant zero
2533 with the register, which we cannot do outside a MEM. In addition, we need
2534 to record the fact that a register is referenced outside a MEM.
2535
2536 If INSN is an insn, it is the insn containing X. If we replace a REG
2537 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2538 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2539 the REG is being modified.
2540
2541 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2542 That's used when we eliminate in expressions stored in notes.
2543 This means, do not set ref_outside_mem even if the reference
2544 is outside of MEMs.
2545
2546 If FOR_COSTS is true, we are being called before reload in order to
2547 estimate the costs of keeping registers with an equivalence unallocated.
2548
2549 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2550 replacements done assuming all offsets are at their initial values. If
2551 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2552 encounter, return the actual location so that find_reloads will do
2553 the proper thing. */
2554
2555 static rtx
2556 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2557 bool may_use_invariant, bool for_costs)
2558 {
2559 enum rtx_code code = GET_CODE (x);
2560 struct elim_table *ep;
2561 int regno;
2562 rtx new_rtx;
2563 int i, j;
2564 const char *fmt;
2565 int copied = 0;
2566
2567 if (! current_function_decl)
2568 return x;
2569
2570 switch (code)
2571 {
2572 CASE_CONST_ANY:
2573 case CONST:
2574 case SYMBOL_REF:
2575 case CODE_LABEL:
2576 case PC:
2577 case CC0:
2578 case ASM_INPUT:
2579 case ADDR_VEC:
2580 case ADDR_DIFF_VEC:
2581 case RETURN:
2582 return x;
2583
2584 case REG:
2585 regno = REGNO (x);
2586
2587 /* First handle the case where we encounter a bare register that
2588 is eliminable. Replace it with a PLUS. */
2589 if (regno < FIRST_PSEUDO_REGISTER)
2590 {
2591 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2592 ep++)
2593 if (ep->from_rtx == x && ep->can_eliminate)
2594 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2595
2596 }
2597 else if (reg_renumber && reg_renumber[regno] < 0
2598 && reg_equivs
2599 && reg_equiv_invariant (regno))
2600 {
2601 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2602 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2603 mem_mode, insn, true, for_costs);
2604 /* There exists at least one use of REGNO that cannot be
2605 eliminated. Prevent the defining insn from being deleted. */
2606 reg_equiv_init (regno) = NULL;
2607 if (!for_costs)
2608 alter_reg (regno, -1, true);
2609 }
2610 return x;
2611
2612 /* You might think handling MINUS in a manner similar to PLUS is a
2613 good idea. It is not. It has been tried multiple times and every
2614 time the change has had to have been reverted.
2615
2616 Other parts of reload know a PLUS is special (gen_reload for example)
2617 and require special code to handle code a reloaded PLUS operand.
2618
2619 Also consider backends where the flags register is clobbered by a
2620 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2621 lea instruction comes to mind). If we try to reload a MINUS, we
2622 may kill the flags register that was holding a useful value.
2623
2624 So, please before trying to handle MINUS, consider reload as a
2625 whole instead of this little section as well as the backend issues. */
2626 case PLUS:
2627 /* If this is the sum of an eliminable register and a constant, rework
2628 the sum. */
2629 if (REG_P (XEXP (x, 0))
2630 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2631 && CONSTANT_P (XEXP (x, 1)))
2632 {
2633 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2634 ep++)
2635 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2636 {
2637 /* The only time we want to replace a PLUS with a REG (this
2638 occurs when the constant operand of the PLUS is the negative
2639 of the offset) is when we are inside a MEM. We won't want
2640 to do so at other times because that would change the
2641 structure of the insn in a way that reload can't handle.
2642 We special-case the commonest situation in
2643 eliminate_regs_in_insn, so just replace a PLUS with a
2644 PLUS here, unless inside a MEM. */
2645 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2646 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2647 return ep->to_rtx;
2648 else
2649 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2650 plus_constant (Pmode, XEXP (x, 1),
2651 ep->previous_offset));
2652 }
2653
2654 /* If the register is not eliminable, we are done since the other
2655 operand is a constant. */
2656 return x;
2657 }
2658
2659 /* If this is part of an address, we want to bring any constant to the
2660 outermost PLUS. We will do this by doing register replacement in
2661 our operands and seeing if a constant shows up in one of them.
2662
2663 Note that there is no risk of modifying the structure of the insn,
2664 since we only get called for its operands, thus we are either
2665 modifying the address inside a MEM, or something like an address
2666 operand of a load-address insn. */
2667
2668 {
2669 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2670 for_costs);
2671 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2672 for_costs);
2673
2674 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2675 {
2676 /* If one side is a PLUS and the other side is a pseudo that
2677 didn't get a hard register but has a reg_equiv_constant,
2678 we must replace the constant here since it may no longer
2679 be in the position of any operand. */
2680 if (GET_CODE (new0) == PLUS && REG_P (new1)
2681 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2682 && reg_renumber[REGNO (new1)] < 0
2683 && reg_equivs
2684 && reg_equiv_constant (REGNO (new1)) != 0)
2685 new1 = reg_equiv_constant (REGNO (new1));
2686 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2687 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2688 && reg_renumber[REGNO (new0)] < 0
2689 && reg_equiv_constant (REGNO (new0)) != 0)
2690 new0 = reg_equiv_constant (REGNO (new0));
2691
2692 new_rtx = form_sum (GET_MODE (x), new0, new1);
2693
2694 /* As above, if we are not inside a MEM we do not want to
2695 turn a PLUS into something else. We might try to do so here
2696 for an addition of 0 if we aren't optimizing. */
2697 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2698 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2699 else
2700 return new_rtx;
2701 }
2702 }
2703 return x;
2704
2705 case MULT:
2706 /* If this is the product of an eliminable register and a
2707 constant, apply the distribute law and move the constant out
2708 so that we have (plus (mult ..) ..). This is needed in order
2709 to keep load-address insns valid. This case is pathological.
2710 We ignore the possibility of overflow here. */
2711 if (REG_P (XEXP (x, 0))
2712 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2713 && CONST_INT_P (XEXP (x, 1)))
2714 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2715 ep++)
2716 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2717 {
2718 if (! mem_mode
2719 /* Refs inside notes or in DEBUG_INSNs don't count for
2720 this purpose. */
2721 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2722 || GET_CODE (insn) == INSN_LIST
2723 || DEBUG_INSN_P (insn))))
2724 ep->ref_outside_mem = 1;
2725
2726 return
2727 plus_constant (Pmode,
2728 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2729 ep->previous_offset * INTVAL (XEXP (x, 1)));
2730 }
2731
2732 /* ... fall through ... */
2733
2734 case CALL:
2735 case COMPARE:
2736 /* See comments before PLUS about handling MINUS. */
2737 case MINUS:
2738 case DIV: case UDIV:
2739 case MOD: case UMOD:
2740 case AND: case IOR: case XOR:
2741 case ROTATERT: case ROTATE:
2742 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2743 case NE: case EQ:
2744 case GE: case GT: case GEU: case GTU:
2745 case LE: case LT: case LEU: case LTU:
2746 {
2747 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2748 for_costs);
2749 rtx new1 = XEXP (x, 1)
2750 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2751 for_costs) : 0;
2752
2753 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2754 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2755 }
2756 return x;
2757
2758 case EXPR_LIST:
2759 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2760 if (XEXP (x, 0))
2761 {
2762 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2763 for_costs);
2764 if (new_rtx != XEXP (x, 0))
2765 {
2766 /* If this is a REG_DEAD note, it is not valid anymore.
2767 Using the eliminated version could result in creating a
2768 REG_DEAD note for the stack or frame pointer. */
2769 if (REG_NOTE_KIND (x) == REG_DEAD)
2770 return (XEXP (x, 1)
2771 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2772 for_costs)
2773 : NULL_RTX);
2774
2775 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2776 }
2777 }
2778
2779 /* ... fall through ... */
2780
2781 case INSN_LIST:
2782 case INT_LIST:
2783 /* Now do eliminations in the rest of the chain. If this was
2784 an EXPR_LIST, this might result in allocating more memory than is
2785 strictly needed, but it simplifies the code. */
2786 if (XEXP (x, 1))
2787 {
2788 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2789 for_costs);
2790 if (new_rtx != XEXP (x, 1))
2791 return
2792 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2793 }
2794 return x;
2795
2796 case PRE_INC:
2797 case POST_INC:
2798 case PRE_DEC:
2799 case POST_DEC:
2800 /* We do not support elimination of a register that is modified.
2801 elimination_effects has already make sure that this does not
2802 happen. */
2803 return x;
2804
2805 case PRE_MODIFY:
2806 case POST_MODIFY:
2807 /* We do not support elimination of a register that is modified.
2808 elimination_effects has already make sure that this does not
2809 happen. The only remaining case we need to consider here is
2810 that the increment value may be an eliminable register. */
2811 if (GET_CODE (XEXP (x, 1)) == PLUS
2812 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2813 {
2814 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2815 insn, true, for_costs);
2816
2817 if (new_rtx != XEXP (XEXP (x, 1), 1))
2818 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2819 gen_rtx_PLUS (GET_MODE (x),
2820 XEXP (x, 0), new_rtx));
2821 }
2822 return x;
2823
2824 case STRICT_LOW_PART:
2825 case NEG: case NOT:
2826 case SIGN_EXTEND: case ZERO_EXTEND:
2827 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2828 case FLOAT: case FIX:
2829 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2830 case ABS:
2831 case SQRT:
2832 case FFS:
2833 case CLZ:
2834 case CTZ:
2835 case POPCOUNT:
2836 case PARITY:
2837 case BSWAP:
2838 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2839 for_costs);
2840 if (new_rtx != XEXP (x, 0))
2841 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2842 return x;
2843
2844 case SUBREG:
2845 /* Similar to above processing, but preserve SUBREG_BYTE.
2846 Convert (subreg (mem)) to (mem) if not paradoxical.
2847 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2848 pseudo didn't get a hard reg, we must replace this with the
2849 eliminated version of the memory location because push_reload
2850 may do the replacement in certain circumstances. */
2851 if (REG_P (SUBREG_REG (x))
2852 && !paradoxical_subreg_p (x)
2853 && reg_equivs
2854 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2855 {
2856 new_rtx = SUBREG_REG (x);
2857 }
2858 else
2859 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2860
2861 if (new_rtx != SUBREG_REG (x))
2862 {
2863 int x_size = GET_MODE_SIZE (GET_MODE (x));
2864 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2865
2866 if (MEM_P (new_rtx)
2867 && ((x_size < new_size
2868 #ifdef WORD_REGISTER_OPERATIONS
2869 /* On these machines, combine can create rtl of the form
2870 (set (subreg:m1 (reg:m2 R) 0) ...)
2871 where m1 < m2, and expects something interesting to
2872 happen to the entire word. Moreover, it will use the
2873 (reg:m2 R) later, expecting all bits to be preserved.
2874 So if the number of words is the same, preserve the
2875 subreg so that push_reload can see it. */
2876 && ! ((x_size - 1) / UNITS_PER_WORD
2877 == (new_size -1 ) / UNITS_PER_WORD)
2878 #endif
2879 )
2880 || x_size == new_size)
2881 )
2882 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2883 else
2884 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2885 }
2886
2887 return x;
2888
2889 case MEM:
2890 /* Our only special processing is to pass the mode of the MEM to our
2891 recursive call and copy the flags. While we are here, handle this
2892 case more efficiently. */
2893
2894 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2895 for_costs);
2896 if (for_costs
2897 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2898 && !memory_address_p (GET_MODE (x), new_rtx))
2899 note_reg_elim_costly (XEXP (x, 0), insn);
2900
2901 return replace_equiv_address_nv (x, new_rtx);
2902
2903 case USE:
2904 /* Handle insn_list USE that a call to a pure function may generate. */
2905 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2906 for_costs);
2907 if (new_rtx != XEXP (x, 0))
2908 return gen_rtx_USE (GET_MODE (x), new_rtx);
2909 return x;
2910
2911 case CLOBBER:
2912 case ASM_OPERANDS:
2913 gcc_assert (insn && DEBUG_INSN_P (insn));
2914 break;
2915
2916 case SET:
2917 gcc_unreachable ();
2918
2919 default:
2920 break;
2921 }
2922
2923 /* Process each of our operands recursively. If any have changed, make a
2924 copy of the rtx. */
2925 fmt = GET_RTX_FORMAT (code);
2926 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2927 {
2928 if (*fmt == 'e')
2929 {
2930 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2931 for_costs);
2932 if (new_rtx != XEXP (x, i) && ! copied)
2933 {
2934 x = shallow_copy_rtx (x);
2935 copied = 1;
2936 }
2937 XEXP (x, i) = new_rtx;
2938 }
2939 else if (*fmt == 'E')
2940 {
2941 int copied_vec = 0;
2942 for (j = 0; j < XVECLEN (x, i); j++)
2943 {
2944 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2945 for_costs);
2946 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2947 {
2948 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2949 XVEC (x, i)->elem);
2950 if (! copied)
2951 {
2952 x = shallow_copy_rtx (x);
2953 copied = 1;
2954 }
2955 XVEC (x, i) = new_v;
2956 copied_vec = 1;
2957 }
2958 XVECEXP (x, i, j) = new_rtx;
2959 }
2960 }
2961 }
2962
2963 return x;
2964 }
2965
2966 rtx
2967 eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2968 {
2969 if (reg_eliminate == NULL)
2970 {
2971 gcc_assert (targetm.no_register_allocation);
2972 return x;
2973 }
2974 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2975 }
2976
/* Scan rtx X for modifications of elimination target registers.  Update
   the table of eliminables to reflect the changed state.  MEM_MODE is
   the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM.

   This only records effects (offsets, disabled rules, out-of-MEM
   references) in reg_eliminate; it does not rewrite X.  */

static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Leaf codes that cannot reference an eliminable register.  */
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		/* A use outside any MEM is remembered; such a reference
		   can invalidate the rule later if its offset changes.  */
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	/* A spilled pseudo with a non-invariant constant equivalence may
	   itself mention eliminable registers; scan the equivalence.  */
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (reg = reg + const) modifies are trackable.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical SUBREG of a pseudo with a memory equivalence
	 will be replaced wholesale elsewhere; no effects to record.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx ('e') and rtvec ('E') operand.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3179
3180 /* Descend through rtx X and verify that no references to eliminable registers
3181 remain. If any do remain, mark the involved register as not
3182 eliminable. */
3183
3184 static void
3185 check_eliminable_occurrences (rtx x)
3186 {
3187 const char *fmt;
3188 int i;
3189 enum rtx_code code;
3190
3191 if (x == 0)
3192 return;
3193
3194 code = GET_CODE (x);
3195
3196 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3197 {
3198 struct elim_table *ep;
3199
3200 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3201 if (ep->from_rtx == x)
3202 ep->can_eliminate = 0;
3203 return;
3204 }
3205
3206 fmt = GET_RTX_FORMAT (code);
3207 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3208 {
3209 if (*fmt == 'e')
3210 check_eliminable_occurrences (XEXP (x, i));
3211 else if (*fmt == 'E')
3212 {
3213 int j;
3214 for (j = 0; j < XVECLEN (x, i); j++)
3215 check_eliminable_occurrences (XVECEXP (x, i, j));
3216 }
3217 }
3218 }
3219 \f
/* Scan INSN and eliminate all eliminable registers in it.

   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.

   If REPLACE is zero, do all our allocations in reload_obstack.

   If no eliminations were done and this insn doesn't require any elimination
   processing (these are not identical conditions: it might be updating sp,
   but not referencing fp; this needs to be seen during reload_as_needed so
   that the offset between fp and sp can be taken into consideration), zero
   is returned.  Otherwise, 1 is returned.  */

static int
eliminate_regs_in_insn (rtx_insn *insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;                  /* Nonzero once anything changed.  */
  int i;
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable insns can only be USE, CLOBBER, ASM_INPUT or debug
     insns (asserted below); for a debug insn we still rewrite its
     location expression, but no elimination bookkeeping is needed.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      if (DEBUG_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
		&& ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx base = SET_SRC (old_set);
		rtx_insn *base_insn = insn;
		HOST_WIDE_INT offset = 0;

		/* Walk backwards through (plus ... const) sources and
		   prior single-set definitions of BASE, accumulating the
		   total constant offset from the elimination target.  */
		while (base != ep->to_rtx)
		  {
		    rtx_insn *prev_insn;
		    rtx prev_set;

		    if (GET_CODE (base) == PLUS
			&& CONST_INT_P (XEXP (base, 1)))
		      {
			offset += INTVAL (XEXP (base, 1));
			base = XEXP (base, 0);
		      }
		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
			     && (prev_set = single_set (prev_insn)) != 0
			     && rtx_equal_p (SET_DEST (prev_set), base))
		      {
			base = SET_SRC (prev_set);
			base_insn = prev_insn;
		      }
		    else
		      break;
		  }

		if (base == ep->to_rtx)
		  {
		    rtx src = plus_constant (Pmode, ep->to_rtx,
					     offset - ep->offset);

		    new_body = old_body;
		    if (! replace)
		      {
			new_body = copy_insn (old_body);
			if (REG_NOTES (insn))
			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		      }
		    PATTERN (insn) = new_body;
		    old_set = single_set (insn);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fit it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }

		    val = 1;
		    goto done;
		  }
	      }

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      {
		delete_dead_insn (insn);
		return 1;
	      }
	    val = 1;
	    goto done;
	  }
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (offset == 0 || plus_src)
	      {
		rtx new_src = plus_constant (GET_MODE (to_rtx),
					     to_rtx, offset);

		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3636
/* Like eliminate_regs_in_insn, but only estimate costs for the use of the
   register allocator.  INSN is the instruction we need to examine; we perform
   eliminations in its operands and record cases where eliminating a reg with
   an invariant equivalence would add extra cost.

   Unlike eliminate_regs_in_insn, this never modifies INSN permanently:
   operands are restored to their original values before returning.  */

static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable insns can only be USE, CLOBBER, ASM_INPUT or debug
     insns (asserted below); they contribute no elimination cost.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Result is discarded; the call runs only for its cost notes
	     (for_costs argument is true).  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3789
3790 /* Loop through all elimination pairs.
3791 Recalculate the number not at initial offset.
3792
3793 Compute the maximum offset (minimum offset if the stack does not
3794 grow downward) for each elimination pair. */
3795
3796 static void
3797 update_eliminable_offsets (void)
3798 {
3799 struct elim_table *ep;
3800
3801 num_not_at_initial_offset = 0;
3802 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3803 {
3804 ep->previous_offset = ep->offset;
3805 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3806 num_not_at_initial_offset++;
3807 }
3808 }
3809
3810 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3811 replacement we currently believe is valid, mark it as not eliminable if X
3812 modifies DEST in any way other than by adding a constant integer to it.
3813
3814 If DEST is the frame pointer, we do nothing because we assume that
3815 all assignments to the hard frame pointer are nonlocal gotos and are being
3816 done at a time when they are valid and do not disturb anything else.
3817 Some machines want to eliminate a fake argument pointer with either the
3818 frame or stack pointer. Assignments to the hard frame pointer must not
3819 prevent this elimination.
3820
3821 Called via note_stores from reload before starting its passes to scan
3822 the insns of the function. */
3823
3824 static void
3825 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3826 {
3827 unsigned int i;
3828
3829 /* A SUBREG of a hard register here is just changing its mode. We should
3830 not see a SUBREG of an eliminable hard register, but check just in
3831 case. */
3832 if (GET_CODE (dest) == SUBREG)
3833 dest = SUBREG_REG (dest);
3834
3835 if (dest == hard_frame_pointer_rtx)
3836 return;
3837
3838 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3839 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3840 && (GET_CODE (x) != SET
3841 || GET_CODE (SET_SRC (x)) != PLUS
3842 || XEXP (SET_SRC (x), 0) != dest
3843 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3844 {
3845 reg_eliminate[i].can_eliminate_previous
3846 = reg_eliminate[i].can_eliminate = 0;
3847 num_eliminable--;
3848 }
3849 }
3850
3851 /* Verify that the initial elimination offsets did not change since the
3852 last call to set_initial_elim_offsets. This is used to catch cases
3853 where something illegal happened during reload_as_needed that could
3854 cause incorrect code to be generated if we did not check for it. */
3855
3856 static bool
3857 verify_initial_elim_offsets (void)
3858 {
3859 HOST_WIDE_INT t;
3860
3861 if (!num_eliminable)
3862 return true;
3863
3864 #ifdef ELIMINABLE_REGS
3865 {
3866 struct elim_table *ep;
3867
3868 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3869 {
3870 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3871 if (t != ep->initial_offset)
3872 return false;
3873 }
3874 }
3875 #else
3876 INITIAL_FRAME_POINTER_OFFSET (t);
3877 if (t != reg_eliminate[0].initial_offset)
3878 return false;
3879 #endif
3880
3881 return true;
3882 }
3883
3884 /* Reset all offsets on eliminable registers to their initial values. */
3885
3886 static void
3887 set_initial_elim_offsets (void)
3888 {
3889 struct elim_table *ep = reg_eliminate;
3890
3891 #ifdef ELIMINABLE_REGS
3892 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3893 {
3894 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3895 ep->previous_offset = ep->offset = ep->initial_offset;
3896 }
3897 #else
3898 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3899 ep->previous_offset = ep->offset = ep->initial_offset;
3900 #endif
3901
3902 num_not_at_initial_offset = 0;
3903 }
3904
/* Subroutine of set_initial_label_offsets, called via for_each_eh_label.
   Record that the EH label LABEL is reached with every elimination at
   its initial offset (the final argument 1 requests the initial
   offsets — see set_label_offsets).  */

static void
set_initial_eh_label_offset (rtx label)
{
  set_label_offsets (label, NULL, 1);
}
3912
3913 /* Initialize the known label offsets.
3914 Set a known offset for each forced label to be at the initial offset
3915 of each elimination. We do this because we assume that all
3916 computed jumps occur from a location where each elimination is
3917 at its initial offset.
3918 For all other labels, show that we don't know the offsets. */
3919
3920 static void
3921 set_initial_label_offsets (void)
3922 {
3923 memset (offsets_known_at, 0, num_labels);
3924
3925 for (rtx_insn_list *x = forced_labels; x; x = x->next ())
3926 if (x->insn ())
3927 set_label_offsets (x->insn (), NULL, 1);
3928
3929 for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
3930 if (x->insn ())
3931 set_label_offsets (x->insn (), NULL, 1);
3932
3933 for_each_eh_label (set_initial_eh_label_offset);
3934 }
3935
3936 /* Set all elimination offsets to the known values for the code label given
3937 by INSN. */
3938
3939 static void
3940 set_offsets_for_label (rtx_insn *insn)
3941 {
3942 unsigned int i;
3943 int label_nr = CODE_LABEL_NUMBER (insn);
3944 struct elim_table *ep;
3945
3946 num_not_at_initial_offset = 0;
3947 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3948 {
3949 ep->offset = ep->previous_offset
3950 = offsets_at[label_nr - first_label_num][i];
3951 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3952 num_not_at_initial_offset++;
3953 }
3954 }
3955
/* See if anything that happened changes which eliminations are valid.
   For example, on the SPARC, whether or not the frame pointer can
   be eliminated can depend on what registers have been used.  We need
   not check some conditions again (such as flag_omit_frame_pointer)
   since they can't have changed.

   PSET is an output parameter: the hard registers whose eliminations were
   newly disabled (and therefore must be spilled by the caller) are added
   to it.  Also updates the globals frame_pointer_needed and
   num_eliminable as side effects.  */

static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First pass: re-check the target-dependent conditions that may have
     changed and disable any elimination they now forbid.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  (If no new elimination was found, NEW_TO stays -1
	     and this loop matches nothing.)  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      /* A newly-disabled elimination means its FROM register must be
	 spilled; report it to the caller via PSET.  */
      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4039
4040 /* Call update_eliminables an spill any registers we can't eliminate anymore.
4041 Return true iff a register was spilled. */
4042
4043 static bool
4044 update_eliminables_and_spill (void)
4045 {
4046 int i;
4047 bool did_spill = false;
4048 HARD_REG_SET to_spill;
4049 CLEAR_HARD_REG_SET (to_spill);
4050 update_eliminables (&to_spill);
4051 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4052
4053 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4054 if (TEST_HARD_REG_BIT (to_spill, i))
4055 {
4056 spill_hard_reg (i, 1);
4057 did_spill = true;
4058
4059 /* Regardless of the state of spills, if we previously had
4060 a register that we thought we could eliminate, but now can
4061 not eliminate, we must run another pass.
4062
4063 Consider pseudos which have an entry in reg_equiv_* which
4064 reference an eliminable register. We must make another pass
4065 to update reg_equiv_* so that we do not substitute in the
4066 old value from when we thought the elimination could be
4067 performed. */
4068 }
4069 return did_spill;
4070 }
4071
4072 /* Return true if X is used as the target register of an elimination. */
4073
4074 bool
4075 elimination_target_reg_p (rtx x)
4076 {
4077 struct elim_table *ep;
4078
4079 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4080 if (ep->to_rtx == x && ep->can_eliminate)
4081 return true;
4082
4083 return false;
4084 }
4085
4086 /* Initialize the table of registers to eliminate.
4087 Pre-condition: global flag frame_pointer_needed has been set before
4088 calling this function. */
4089
4090 static void
4091 init_elim_table (void)
4092 {
4093 struct elim_table *ep;
4094 #ifdef ELIMINABLE_REGS
4095 const struct elim_table_1 *ep1;
4096 #endif
4097
4098 if (!reg_eliminate)
4099 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);
4100
4101 num_eliminable = 0;
4102
4103 #ifdef ELIMINABLE_REGS
4104 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
4105 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
4106 {
4107 ep->from = ep1->from;
4108 ep->to = ep1->to;
4109 ep->can_eliminate = ep->can_eliminate_previous
4110 = (targetm.can_eliminate (ep->from, ep->to)
4111 && ! (ep->to == STACK_POINTER_REGNUM
4112 && frame_pointer_needed
4113 && (! SUPPORTS_STACK_ALIGNMENT
4114 || ! stack_realign_fp)));
4115 }
4116 #else
4117 reg_eliminate[0].from = reg_eliminate_1[0].from;
4118 reg_eliminate[0].to = reg_eliminate_1[0].to;
4119 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
4120 = ! frame_pointer_needed;
4121 #endif
4122
4123 /* Count the number of eliminable registers and build the FROM and TO
4124 REG rtx's. Note that code in gen_rtx_REG will cause, e.g.,
4125 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
4126 We depend on this. */
4127 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4128 {
4129 num_eliminable += ep->can_eliminate;
4130 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
4131 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
4132 }
4133 }
4134
/* Find all the pseudo registers that didn't get hard regs
   but do have known equivalent constants or memory slots.
   These include parameters (known equivalent to parameter slots)
   and cse'd or loop-moved constant memory addresses.

   Record constant equivalents in reg_equiv_constant
   so they will be substituted by find_reloads.
   Record memory equivalents in reg_mem_equiv so they can
   be substituted eventually by altering the REG-rtx's.

   FIRST is the head of the insn list to scan.  If DO_SUBREGS is true,
   also allocate reg_max_ref_width and record the widest paradoxical
   subreg reference of each pseudo (via scan_paradoxical_subregs).  */

static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

/* Look for REG_EQUIV notes; record what each pseudo is equivalent
   to.  If DO_SUBREGS is true, also find all paradoxical subregs and
   find largest such for each pseudo.  FIRST is the head of the insn
   list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Hard registers and virtual registers are not of interest
	     here; only genuine pseudos get equivalences recorded.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* Constant not directly usable in insns: place it
			 in the constant pool and record the MEM.  Drop
			 the init insns if even that failed.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL;
	}
    }

  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4261
4262 /* Indicate that we no longer have known memory locations or constants.
4263 Free all data involved in tracking these. */
4264
4265 static void
4266 free_reg_equiv (void)
4267 {
4268 int i;
4269
4270 free (offsets_known_at);
4271 free (offsets_at);
4272 offsets_at = 0;
4273 offsets_known_at = 0;
4274
4275 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4276 if (reg_equiv_alt_mem_list (i))
4277 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4278 vec_free (reg_equivs);
4279 }
4280 \f
4281 /* Kick all pseudos out of hard register REGNO.
4282
4283 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
4284 because we found we can't eliminate some register. In the case, no pseudos
4285 are allowed to be in the register, even if they are only in a block that
4286 doesn't require spill registers, unlike the case when we are spilling this
4287 hard reg to produce another spill register.
4288
4289 Return nonzero if any pseudos needed to be kicked out. */
4290
4291 static void
4292 spill_hard_reg (unsigned int regno, int cant_eliminate)
4293 {
4294 int i;
4295
4296 if (cant_eliminate)
4297 {
4298 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
4299 df_set_regs_ever_live (regno, true);
4300 }
4301
4302 /* Spill every pseudo reg that was allocated to this reg
4303 or to something that overlaps this reg. */
4304
4305 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4306 if (reg_renumber[i] >= 0
4307 && (unsigned int) reg_renumber[i] <= regno
4308 && end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
4309 SET_REGNO_REG_SET (&spilled_pseudos, i);
4310 }
4311
/* After find_reload_regs has been run for all insn that need reloads,
   and/or spill_hard_regs was called, this function is used to actually
   spill pseudo registers and try to reallocate them.  It also sets up the
   spill_regs array for use by choose_reload_regs.

   GLOBAL is nonzero to allow retrying global allocation through IRA.
   Returns nonzero if any allocation changed, meaning the caller must run
   another reload pass.  */

static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  /* Evict each spilled pseudo from its hard register.  */
  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4470 \f
4471 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4472
4473 static void
4474 scan_paradoxical_subregs (rtx x)
4475 {
4476 int i;
4477 const char *fmt;
4478 enum rtx_code code = GET_CODE (x);
4479
4480 switch (code)
4481 {
4482 case REG:
4483 case CONST:
4484 case SYMBOL_REF:
4485 case LABEL_REF:
4486 CASE_CONST_ANY:
4487 case CC0:
4488 case PC:
4489 case USE:
4490 case CLOBBER:
4491 return;
4492
4493 case SUBREG:
4494 if (REG_P (SUBREG_REG (x))
4495 && (GET_MODE_SIZE (GET_MODE (x))
4496 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4497 {
4498 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4499 = GET_MODE_SIZE (GET_MODE (x));
4500 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4501 }
4502 return;
4503
4504 default:
4505 break;
4506 }
4507
4508 fmt = GET_RTX_FORMAT (code);
4509 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4510 {
4511 if (fmt[i] == 'e')
4512 scan_paradoxical_subregs (XEXP (x, i));
4513 else if (fmt[i] == 'E')
4514 {
4515 int j;
4516 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4517 scan_paradoxical_subregs (XVECEXP (x, i, j));
4518 }
4519 }
4520 }
4521
4522 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4523 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4524 and apply the corresponding narrowing subreg to *OTHER_PTR.
4525 Return true if the operands were changed, false otherwise. */
4526
4527 static bool
4528 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4529 {
4530 rtx op, inner, other, tem;
4531
4532 op = *op_ptr;
4533 if (!paradoxical_subreg_p (op))
4534 return false;
4535 inner = SUBREG_REG (op);
4536
4537 other = *other_ptr;
4538 tem = gen_lowpart_common (GET_MODE (inner), other);
4539 if (!tem)
4540 return false;
4541
4542 /* If the lowpart operation turned a hard register into a subreg,
4543 rather than simplifying it to another hard register, then the
4544 mode change cannot be properly represented. For example, OTHER
4545 might be valid in its current mode, but not in the new one. */
4546 if (GET_CODE (tem) == SUBREG
4547 && REG_P (other)
4548 && HARD_REGISTER_P (other))
4549 return false;
4550
4551 *op_ptr = inner;
4552 *other_ptr = tem;
4553 return true;
4554 }
4555 \f
4556 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4557 examine all of the reload insns between PREV and NEXT exclusive, and
4558 annotate all that may trap. */
4559
4560 static void
4561 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4562 {
4563 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4564 if (note == NULL)
4565 return;
4566 if (!insn_could_throw_p (insn))
4567 remove_note (insn, note);
4568 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4569 }
4570
4571 /* Reload pseudo-registers into hard regs around each insn as needed.
4572 Additional register load insns are output before the insn that needs it
4573 and perhaps store insns after insns that modify the reloaded pseudo reg.
4574
4575 reg_last_reload_reg and reg_reloaded_contents keep track of
4576 which registers are already available in reload registers.
4577 We update these for the reloads that we perform,
4578 as the insns are scanned. */
4579
4580 static void
4581 reload_as_needed (int live_known)
4582 {
4583 struct insn_chain *chain;
4584 #if AUTO_INC_DEC
4585 int i;
4586 #endif
4587 rtx_note *marker;
4588
4589 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4590 memset (spill_reg_store, 0, sizeof spill_reg_store);
4591 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4592 INIT_REG_SET (&reg_has_output_reload);
4593 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4594 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4595
4596 set_initial_elim_offsets ();
4597
4598 /* Generate a marker insn that we will move around. */
4599 marker = emit_note (NOTE_INSN_DELETED);
4600 unlink_insn_chain (marker, marker);
4601
4602 for (chain = reload_insn_chain; chain; chain = chain->next)
4603 {
4604 rtx_insn *prev = 0;
4605 rtx_insn *insn = chain->insn;
4606 rtx_insn *old_next = NEXT_INSN (insn);
4607 #if AUTO_INC_DEC
4608 rtx_insn *old_prev = PREV_INSN (insn);
4609 #endif
4610
4611 if (will_delete_init_insn_p (insn))
4612 continue;
4613
4614 /* If we pass a label, copy the offsets from the label information
4615 into the current offsets of each elimination. */
4616 if (LABEL_P (insn))
4617 set_offsets_for_label (insn);
4618
4619 else if (INSN_P (insn))
4620 {
4621 regset_head regs_to_forget;
4622 INIT_REG_SET (&regs_to_forget);
4623 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4624
4625 /* If this is a USE and CLOBBER of a MEM, ensure that any
4626 references to eliminable registers have been removed. */
4627
4628 if ((GET_CODE (PATTERN (insn)) == USE
4629 || GET_CODE (PATTERN (insn)) == CLOBBER)
4630 && MEM_P (XEXP (PATTERN (insn), 0)))
4631 XEXP (XEXP (PATTERN (insn), 0), 0)
4632 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4633 GET_MODE (XEXP (PATTERN (insn), 0)),
4634 NULL_RTX);
4635
4636 /* If we need to do register elimination processing, do so.
4637 This might delete the insn, in which case we are done. */
4638 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4639 {
4640 eliminate_regs_in_insn (insn, 1);
4641 if (NOTE_P (insn))
4642 {
4643 update_eliminable_offsets ();
4644 CLEAR_REG_SET (&regs_to_forget);
4645 continue;
4646 }
4647 }
4648
4649 /* If need_elim is nonzero but need_reload is zero, one might think
4650 that we could simply set n_reloads to 0. However, find_reloads
4651 could have done some manipulation of the insn (such as swapping
4652 commutative operands), and these manipulations are lost during
4653 the first pass for every insn that needs register elimination.
4654 So the actions of find_reloads must be redone here. */
4655
4656 if (! chain->need_elim && ! chain->need_reload
4657 && ! chain->need_operand_change)
4658 n_reloads = 0;
4659 /* First find the pseudo regs that must be reloaded for this insn.
4660 This info is returned in the tables reload_... (see reload.h).
4661 Also modify the body of INSN by substituting RELOAD
4662 rtx's for those pseudo regs. */
4663 else
4664 {
4665 CLEAR_REG_SET (&reg_has_output_reload);
4666 CLEAR_HARD_REG_SET (reg_is_output_reload);
4667
4668 find_reloads (insn, 1, spill_indirect_levels, live_known,
4669 spill_reg_order);
4670 }
4671
4672 if (n_reloads > 0)
4673 {
4674 rtx_insn *next = NEXT_INSN (insn);
4675
4676 /* ??? PREV can get deleted by reload inheritance.
4677 Work around this by emitting a marker note. */
4678 prev = PREV_INSN (insn);
4679 reorder_insns_nobb (marker, marker, prev);
4680
4681 /* Now compute which reload regs to reload them into. Perhaps
4682 reusing reload regs from previous insns, or else output
4683 load insns to reload them. Maybe output store insns too.
4684 Record the choices of reload reg in reload_reg_rtx. */
4685 choose_reload_regs (chain);
4686
4687 /* Generate the insns to reload operands into or out of
4688 their reload regs. */
4689 emit_reload_insns (chain);
4690
4691 /* Substitute the chosen reload regs from reload_reg_rtx
4692 into the insn's body (or perhaps into the bodies of other
4693 load and store insn that we just made for reloading
4694 and that we moved the structure into). */
4695 subst_reloads (insn);
4696
4697 prev = PREV_INSN (marker);
4698 unlink_insn_chain (marker, marker);
4699
4700 /* Adjust the exception region notes for loads and stores. */
4701 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4702 fixup_eh_region_note (insn, prev, next);
4703
4704 /* Adjust the location of REG_ARGS_SIZE. */
4705 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4706 if (p)
4707 {
4708 remove_note (insn, p);
4709 fixup_args_size_notes (prev, PREV_INSN (next),
4710 INTVAL (XEXP (p, 0)));
4711 }
4712
4713 /* If this was an ASM, make sure that all the reload insns
4714 we have generated are valid. If not, give an error
4715 and delete them. */
4716 if (asm_noperands (PATTERN (insn)) >= 0)
4717 for (rtx_insn *p = NEXT_INSN (prev);
4718 p != next;
4719 p = NEXT_INSN (p))
4720 if (p != insn && INSN_P (p)
4721 && GET_CODE (PATTERN (p)) != USE
4722 && (recog_memoized (p) < 0
4723 || (extract_insn (p),
4724 !(constrain_operands (1,
4725 get_enabled_alternatives (p))))))
4726 {
4727 error_for_asm (insn,
4728 "%<asm%> operand requires "
4729 "impossible reload");
4730 delete_insn (p);
4731 }
4732 }
4733
4734 if (num_eliminable && chain->need_elim)
4735 update_eliminable_offsets ();
4736
4737 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4738 is no longer validly lying around to save a future reload.
4739 Note that this does not detect pseudos that were reloaded
4740 for this insn in order to be stored in
4741 (obeying register constraints). That is correct; such reload
4742 registers ARE still valid. */
4743 forget_marked_reloads (&regs_to_forget);
4744 CLEAR_REG_SET (&regs_to_forget);
4745
4746 /* There may have been CLOBBER insns placed after INSN. So scan
4747 between INSN and NEXT and use them to forget old reloads. */
4748 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4749 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4750 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4751
4752 #if AUTO_INC_DEC
4753 /* Likewise for regs altered by auto-increment in this insn.
4754 REG_INC notes have been changed by reloading:
4755 find_reloads_address_1 records substitutions for them,
4756 which have been performed by subst_reloads above. */
4757 for (i = n_reloads - 1; i >= 0; i--)
4758 {
4759 rtx in_reg = rld[i].in_reg;
4760 if (in_reg)
4761 {
4762 enum rtx_code code = GET_CODE (in_reg);
4763 /* PRE_INC / PRE_DEC will have the reload register ending up
4764 with the same value as the stack slot, but that doesn't
4765 hold true for POST_INC / POST_DEC. Either we have to
4766 convert the memory access to a true POST_INC / POST_DEC,
4767 or we can't use the reload register for inheritance. */
4768 if ((code == POST_INC || code == POST_DEC)
4769 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4770 REGNO (rld[i].reg_rtx))
4771 /* Make sure it is the inc/dec pseudo, and not
4772 some other (e.g. output operand) pseudo. */
4773 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4774 == REGNO (XEXP (in_reg, 0))))
4775
4776 {
4777 rtx reload_reg = rld[i].reg_rtx;
4778 machine_mode mode = GET_MODE (reload_reg);
4779 int n = 0;
4780 rtx_insn *p;
4781
4782 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4783 {
4784 /* We really want to ignore REG_INC notes here, so
4785 use PATTERN (p) as argument to reg_set_p . */
4786 if (reg_set_p (reload_reg, PATTERN (p)))
4787 break;
4788 n = count_occurrences (PATTERN (p), reload_reg, 0);
4789 if (! n)
4790 continue;
4791 if (n == 1)
4792 {
4793 rtx replace_reg
4794 = gen_rtx_fmt_e (code, mode, reload_reg);
4795
4796 validate_replace_rtx_group (reload_reg,
4797 replace_reg, p);
4798 n = verify_changes (0);
4799
4800 /* We must also verify that the constraints
4801 are met after the replacement. Make sure
4802 extract_insn is only called for an insn
4803 where the replacements were found to be
4804 valid so far. */
4805 if (n)
4806 {
4807 extract_insn (p);
4808 n = constrain_operands (1,
4809 get_enabled_alternatives (p));
4810 }
4811
4812 /* If the constraints were not met, then
4813 undo the replacement, else confirm it. */
4814 if (!n)
4815 cancel_changes (0);
4816 else
4817 confirm_change_group ();
4818 }
4819 break;
4820 }
4821 if (n == 1)
4822 {
4823 add_reg_note (p, REG_INC, reload_reg);
4824 /* Mark this as having an output reload so that the
4825 REG_INC processing code below won't invalidate
4826 the reload for inheritance. */
4827 SET_HARD_REG_BIT (reg_is_output_reload,
4828 REGNO (reload_reg));
4829 SET_REGNO_REG_SET (&reg_has_output_reload,
4830 REGNO (XEXP (in_reg, 0)));
4831 }
4832 else
4833 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4834 NULL);
4835 }
4836 else if ((code == PRE_INC || code == PRE_DEC)
4837 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4838 REGNO (rld[i].reg_rtx))
4839 /* Make sure it is the inc/dec pseudo, and not
4840 some other (e.g. output operand) pseudo. */
4841 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4842 == REGNO (XEXP (in_reg, 0))))
4843 {
4844 SET_HARD_REG_BIT (reg_is_output_reload,
4845 REGNO (rld[i].reg_rtx));
4846 SET_REGNO_REG_SET (&reg_has_output_reload,
4847 REGNO (XEXP (in_reg, 0)));
4848 }
4849 else if (code == PRE_INC || code == PRE_DEC
4850 || code == POST_INC || code == POST_DEC)
4851 {
4852 int in_regno = REGNO (XEXP (in_reg, 0));
4853
4854 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4855 {
4856 int in_hard_regno;
4857 bool forget_p = true;
4858
4859 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4860 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4861 in_hard_regno))
4862 {
4863 for (rtx_insn *x = (old_prev ?
4864 NEXT_INSN (old_prev) : insn);
4865 x != old_next;
4866 x = NEXT_INSN (x))
4867 if (x == reg_reloaded_insn[in_hard_regno])
4868 {
4869 forget_p = false;
4870 break;
4871 }
4872 }
4873 /* If for some reasons, we didn't set up
4874 reg_last_reload_reg in this insn,
4875 invalidate inheritance from previous
4876 insns for the incremented/decremented
4877 register. Such registers will be not in
4878 reg_has_output_reload. Invalidate it
4879 also if the corresponding element in
4880 reg_reloaded_insn is also
4881 invalidated. */
4882 if (forget_p)
4883 forget_old_reloads_1 (XEXP (in_reg, 0),
4884 NULL_RTX, NULL);
4885 }
4886 }
4887 }
4888 }
4889 /* If a pseudo that got a hard register is auto-incremented,
4890 we must purge records of copying it into pseudos without
4891 hard registers. */
4892 for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4893 if (REG_NOTE_KIND (x) == REG_INC)
4894 {
4895 /* See if this pseudo reg was reloaded in this insn.
4896 If so, its last-reload info is still valid
4897 because it is based on this insn's reload. */
4898 for (i = 0; i < n_reloads; i++)
4899 if (rld[i].out == XEXP (x, 0))
4900 break;
4901
4902 if (i == n_reloads)
4903 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4904 }
4905 #endif
4906 }
4907 /* A reload reg's contents are unknown after a label. */
4908 if (LABEL_P (insn))
4909 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4910
4911 /* Don't assume a reload reg is still good after a call insn
4912 if it is a call-used reg, or if it contains a value that will
4913 be partially clobbered by the call. */
4914 else if (CALL_P (insn))
4915 {
4916 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4917 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4918
4919 /* If this is a call to a setjmp-type function, we must not
4920 reuse any reload reg contents across the call; that will
4921 just be clobbered by other uses of the register in later
4922 code, before the longjmp. */
4923 if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4924 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4925 }
4926 }
4927
4928 /* Clean up. */
4929 free (reg_last_reload_reg);
4930 CLEAR_REG_SET (&reg_has_output_reload);
4931 }
4932
4933 /* Discard all record of any value reloaded from X,
4934 or reloaded in X from someplace else;
4935 unless X is an output reload reg of the current insn.
4936
4937 X may be a hard reg (the reload reg)
4938 or it may be a pseudo reg that was reloaded from.
4939
4940 When DATA is non-NULL just mark the registers in regset
4941 to be forgotten later. */
4942
4943 static void
4944 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4945 void *data)
4946 {
4947 unsigned int regno;
4948 unsigned int nr;
4949 regset regs = (regset) data;
4950
4951 /* note_stores does give us subregs of hard regs,
4952 subreg_regno_offset requires a hard reg. */
4953 while (GET_CODE (x) == SUBREG)
4954 {
4955 /* We ignore the subreg offset when calculating the regno,
4956 because we are using the entire underlying hard register
4957 below. */
4958 x = SUBREG_REG (x);
4959 }
4960
4961 if (!REG_P (x))
4962 return;
4963
4964 regno = REGNO (x);
4965
4966 if (regno >= FIRST_PSEUDO_REGISTER)
4967 nr = 1;
4968 else
4969 {
4970 unsigned int i;
4971
4972 nr = hard_regno_nregs[regno][GET_MODE (x)];
4973 /* Storing into a spilled-reg invalidates its contents.
4974 This can happen if a block-local pseudo is allocated to that reg
4975 and it wasn't spilled because this block's total need is 0.
4976 Then some insn might have an optional reload and use this reg. */
4977 if (!regs)
4978 for (i = 0; i < nr; i++)
4979 /* But don't do this if the reg actually serves as an output
4980 reload reg in the current instruction. */
4981 if (n_reloads == 0
4982 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4983 {
4984 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4985 spill_reg_store[regno + i] = 0;
4986 }
4987 }
4988
4989 if (regs)
4990 while (nr-- > 0)
4991 SET_REGNO_REG_SET (regs, regno + nr);
4992 else
4993 {
4994 /* Since value of X has changed,
4995 forget any value previously copied from it. */
4996
4997 while (nr-- > 0)
4998 /* But don't forget a copy if this is the output reload
4999 that establishes the copy's validity. */
5000 if (n_reloads == 0
5001 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
5002 reg_last_reload_reg[regno + nr] = 0;
5003 }
5004 }
5005
5006 /* Forget the reloads marked in regset by previous function. */
5007 static void
5008 forget_marked_reloads (regset regs)
5009 {
5010 unsigned int reg;
5011 reg_set_iterator rsi;
5012 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
5013 {
5014 if (reg < FIRST_PSEUDO_REGISTER
5015 /* But don't do this if the reg actually serves as an output
5016 reload reg in the current instruction. */
5017 && (n_reloads == 0
5018 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
5019 {
5020 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
5021 spill_reg_store[reg] = 0;
5022 }
5023 if (n_reloads == 0
5024 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
5025 reg_last_reload_reg[reg] = 0;
5026 }
5027 }
5028 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5067
5068 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5069 TYPE. MODE is used to indicate how many consecutive regs are
5070 actually used. */
5071
5072 static void
5073 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5074 machine_mode mode)
5075 {
5076 switch (type)
5077 {
5078 case RELOAD_OTHER:
5079 add_to_hard_reg_set (&reload_reg_used, mode, regno);
5080 break;
5081
5082 case RELOAD_FOR_INPUT_ADDRESS:
5083 add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5084 break;
5085
5086 case RELOAD_FOR_INPADDR_ADDRESS:
5087 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5088 break;
5089
5090 case RELOAD_FOR_OUTPUT_ADDRESS:
5091 add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5092 break;
5093
5094 case RELOAD_FOR_OUTADDR_ADDRESS:
5095 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5096 break;
5097
5098 case RELOAD_FOR_OPERAND_ADDRESS:
5099 add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5100 break;
5101
5102 case RELOAD_FOR_OPADDR_ADDR:
5103 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5104 break;
5105
5106 case RELOAD_FOR_OTHER_ADDRESS:
5107 add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5108 break;
5109
5110 case RELOAD_FOR_INPUT:
5111 add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5112 break;
5113
5114 case RELOAD_FOR_OUTPUT:
5115 add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5116 break;
5117
5118 case RELOAD_FOR_INSN:
5119 add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
5120 break;
5121 }
5122
5123 add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5124 }
5125
/* Similarly, but show REGNO is no longer in use for a reload.
   OPNUM, TYPE and MODE mirror the arguments previously passed to
   mark_reload_reg_in_use for the same register.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the usage set for TYPE and note whether other reloads of the
     same type might still be sharing part of this register.  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      /* Shrink [start_regno, end_regno) so it excludes every reload reg
	 still claimed by a remaining reload of the same TYPE (and, when
	 check_opnum, the same operand number).  */
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free whatever remains of the interval after the exclusions above.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5232
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  The answer encodes which reload types
   may run concurrently without conflicting, given the order in which
   the different kinds of reload insns are emitted around the insn.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      /* Can't share with the insn itself or any operand address.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input and with the insn body or another
	 operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input and with other RELOAD_FOR_OPADDR_ADDR
	 reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with every input and output, with the insn body, and
	 with operand-address reloads.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Only conflicts with other RELOAD_FOR_OTHER_ADDRESS reloads.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5393
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Like the above, but also conflicts with the operand-address and
	 insn reloads that come between it and the end of the insn.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting OPNUM here makes the shared loop below scan every
	 output-address set.  */
      opnum = reload_n_operands;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5560
5561 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5562 every register in REG. */
5563
5564 static bool
5565 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5566 {
5567 unsigned int i;
5568
5569 for (i = REGNO (reg); i < END_REGNO (reg); i++)
5570 if (!reload_reg_reaches_end_p (i, reloadnum))
5571 return false;
5572 return true;
5573 }
5574 \f
5575
5576 /* Returns whether R1 and R2 are uniquely chained: the value of one
5577 is used by the other, and that value is not used by any other
5578 reload for this insn. This is used to partially undo the decision
5579 made in find_reloads when in the case of multiple
5580 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5581 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5582 reloads. This code tries to avoid the conflict created by that
5583 change. It might be cleaner to explicitly keep track of which
5584 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5585 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5586 this after the fact. */
5587 static bool
5588 reloads_unique_chain_p (int r1, int r2)
5589 {
5590 int i;
5591
5592 /* We only check input reloads. */
5593 if (! rld[r1].in || ! rld[r2].in)
5594 return false;
5595
5596 /* Avoid anything with output reloads. */
5597 if (rld[r1].out || rld[r2].out)
5598 return false;
5599
5600 /* "chained" means one reload is a component of the other reload,
5601 not the same as the other reload. */
5602 if (rld[r1].opnum != rld[r2].opnum
5603 || rtx_equal_p (rld[r1].in, rld[r2].in)
5604 || rld[r1].optional || rld[r2].optional
5605 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5606 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5607 return false;
5608
5609 /* The following loop assumes that r1 is the reload that feeds r2. */
5610 if (r1 > r2)
5611 std::swap (r1, r2);
5612
5613 for (i = 0; i < n_reloads; i ++)
5614 /* Look for input reloads that aren't our two */
5615 if (i != r1 && i != r2 && rld[i].in)
5616 {
5617 /* If our reload is mentioned at all, it isn't a simple chain. */
5618 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5619 return false;
5620 }
5621 return true;
5622 }
5623
5624 /* The recursive function change all occurrences of WHAT in *WHERE
5625 to REPL. */
5626 static void
5627 substitute (rtx *where, const_rtx what, rtx repl)
5628 {
5629 const char *fmt;
5630 int i;
5631 enum rtx_code code;
5632
5633 if (*where == 0)
5634 return;
5635
5636 if (*where == what || rtx_equal_p (*where, what))
5637 {
5638 /* Record the location of the changed rtx. */
5639 substitute_stack.safe_push (where);
5640 *where = repl;
5641 return;
5642 }
5643
5644 code = GET_CODE (*where);
5645 fmt = GET_RTX_FORMAT (code);
5646 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5647 {
5648 if (fmt[i] == 'E')
5649 {
5650 int j;
5651
5652 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5653 substitute (&XVECEXP (*where, i, j), what, repl);
5654 }
5655 else if (fmt[i] == 'e')
5656 substitute (&XEXP (*where, i), what, repl);
5657 }
5658 }
5659
/* Return TRUE if the chain of reloads R1 and R2 (in either order) can
   be evaluated without using an intermediate register for the reload
   that contains the other reload.  It is important to see gen_reload
   to understand what this function is trying to do.  As an example,
   consider the reload chain

      r2: const
      r1: <something> + const

   where reload R2 got reload reg HR.  The function returns true if
   there is a valid insn HR = HR + <something>.  Otherwise, gen_reload
   will use an intermediate register (which is the reload reg for R1)
   to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is an invalid insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

   */
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    std::swap (r1, r2);

  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Prefer R1's reload reg; fall back to R2's if R1 has none.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Substitute the chosen hard reg for R2's input inside R1's input,
     recording each changed location on substitute_stack.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Emit a trial insn OUT = IN and ask the backend whether it is
	 recognizable and satisfies its constraints strictly.  */
      insn = emit_insn (gen_rtx_SET (out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      /* The trial insn was only for validation; remove it again.  */
      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5744
/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
   Return 0 otherwise.

   This function uses the same algorithm as reload_reg_free_p above.
   The conflict rules encode, per reload type, the window of the insn's
   execution during which each reload register is live.  */

static int
reloads_conflict (int r1, int r2)
{
  enum reload_type r1_type = rld[r1].when_needed;
  enum reload_type r2_type = rld[r2].when_needed;
  int r1_opnum = rld[r1].opnum;
  int r2_opnum = rld[r2].opnum;

  /* RELOAD_OTHER conflicts with everything.  */
  if (r2_type == RELOAD_OTHER)
    return 1;

  /* Otherwise, check conflicts differently for each type.  */

  switch (r1_type)
    {
    case RELOAD_FOR_INPUT:
      /* Inputs conflict with anything live during or before the insn,
	 and with address reloads of later operands.  */
      return (r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
	      || r2_type == RELOAD_FOR_OPADDR_ADDR
	      || r2_type == RELOAD_FOR_INPUT
	      || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
		   || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
		  && r2_opnum > r1_opnum));

    case RELOAD_FOR_INPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_INPADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
	      || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));

    case RELOAD_FOR_OUTPUT_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OUTADDR_ADDRESS:
      return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
	      || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Two operand-address reloads may share a register only when
	 they form a valid chain (see gen_reload_chain_without_interm_reg_p).  */
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
	      || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
		  && (!reloads_unique_chain_p (r1, r2)
		      || !gen_reload_chain_without_interm_reg_p (r1, r2))));

    case RELOAD_FOR_OPADDR_ADDR:
      return (r2_type == RELOAD_FOR_INPUT
	      || r2_type == RELOAD_FOR_OPADDR_ADDR);

    case RELOAD_FOR_OUTPUT:
      return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
	      || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
		   || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
		  && r2_opnum >= r1_opnum));

    case RELOAD_FOR_INSN:
      return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
	      || r2_type == RELOAD_FOR_INSN
	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS);

    case RELOAD_FOR_OTHER_ADDRESS:
      return r2_type == RELOAD_FOR_OTHER_ADDRESS;

    case RELOAD_OTHER:
      return 1;

    default:
      gcc_unreachable ();
    }
}
5822 \f
/* Per-insn bookkeeping for choose_reload_regs and emit_reload_insns;
   all arrays are indexed by reload number.  */

/* Indexed by reload number, 1 if the incoming value was
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5844
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering;
   REGNO is the particular hard register in that range being checked.
   Return 1 if REGNO is free for the proposed use, 0 on conflict.
   The remaining parameters are as for free_for_value_p below.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a sentinel: we are testing a copy FROM regno,
     not a use of regno as the reload register (see free_for_value_p).  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Check every other reload whose reload register range covers REGNO;
     reject REGNO if the pseudo-lifetimes overlap incompatibly.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4 */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      /* Conflict if the other reload becomes live before ours dies,
		 and the values differ or either side writes; also if our
		 output would overlap a reload live past the insn.  */
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
6064
6065 /* Return 1 if the value in reload reg REGNO, as used by a reload
6066 needed for the part of the insn specified by OPNUM and TYPE,
6067 may be used to load VALUE into it.
6068
6069 MODE is the mode in which the register is used, this is needed to
6070 determine how many hard regs to test.
6071
6072 Other read-only reloads with the same value do not conflict
6073 unless OUT is nonzero and these other reloads have to live while
6074 output reloads live.
6075 If OUT is CONST0_RTX, this is a special case: it means that the
6076 test should not be for using register REGNO as reload register, but
6077 for copying from register REGNO into the reload register.
6078
6079 RELOADNUM is the number of the reload we want to load this value for;
6080 a reload does not conflict with itself.
6081
6082 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6083 reloads that load an address for the very reload we are considering.
6084
6085 The caller has to make sure that there is no conflict with the return
6086 register. */
6087
6088 static int
6089 free_for_value_p (int regno, machine_mode mode, int opnum,
6090 enum reload_type type, rtx value, rtx out, int reloadnum,
6091 int ignore_address_reloads)
6092 {
6093 int nregs = hard_regno_nregs[regno][mode];
6094 while (nregs-- > 0)
6095 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6096 value, out, reloadnum,
6097 ignore_address_reloads))
6098 return 0;
6099 return 1;
6100 }
6101
6102 /* Return nonzero if the rtx X is invariant over the current function. */
6103 /* ??? Actually, the places where we use this expect exactly what is
6104 tested here, and not everything that is function invariant. In
6105 particular, the frame pointer and arg pointer are special cased;
6106 pic_offset_table_rtx is not, and we must not spill these things to
6107 memory. */
6108
6109 int
6110 function_invariant_p (const_rtx x)
6111 {
6112 if (CONSTANT_P (x))
6113 return 1;
6114 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6115 return 1;
6116 if (GET_CODE (x) == PLUS
6117 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6118 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6119 return 1;
6120 return 0;
6121 }
6122
6123 /* Determine whether the reload reg X overlaps any rtx'es used for
6124 overriding inheritance. Return nonzero if so. */
6125
6126 static int
6127 conflicts_with_override (rtx x)
6128 {
6129 int i;
6130 for (i = 0; i < n_reloads; i++)
6131 if (reload_override_in[i]
6132 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6133 return 1;
6134 return 0;
6135 }
6136 \f
/* Give an error message saying we failed to find a reload for INSN,
   and clear out reload R.  */
static void
failed_reload (rtx_insn *insn, int r)
{
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "%<asm%> operand constraint incompatible with operand size");
  /* Neutralize the reload: no input, no output, no register, and mark
     it optional/secondary so the rest of the pass skips it.  */
  rld[r].in = 0;
  rld[r].out = 0;
  rld[r].reg_rtx = 0;
  rld[r].optional = 1;
  rld[r].secondary_p = 1;
}
6156
/* I is the index in SPILL_REG_RTX of the reload register we are to allocate
   for reload R.  If it's valid, get an rtx for it.  Return nonzero if
   successful.  */
static int
set_reload_reg (int i, int r)
{
  /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
     parameter.  */
  int regno ATTRIBUTE_UNUSED;
  rtx reg = spill_reg_rtx[i];

  /* (Re)build the cached REG rtx if it is missing or has a stale mode.  */
  if (reg == 0 || GET_MODE (reg) != rld[r].mode)
    spill_reg_rtx[i] = reg
      = gen_rtx_REG (rld[r].mode, spill_regs[i]);

  regno = true_regnum (reg);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
    {
      machine_mode test_mode = VOIDmode;
      if (rld[r].in)
	test_mode = GET_MODE (rld[r].in);
      /* If rld[r].in has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, rld[r].mode.
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (rld[r].in != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (rld[r].out != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
	  {
	    /* The reg is OK.  */
	    last_spill_reg = i;

	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
				    rld[r].when_needed, rld[r].mode);

	    rld[r].reg_rtx = reg;
	    reload_spill_index[r] = spill_regs[i];
	    return 1;
	  }
    }
  return 0;
}
6207
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  /* REGNUM is a candidate if it is free for this reload's
	     lifetime (or already holds the wanted value), belongs to the
	     reload's class, and can hold the reload's mode.  */
	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6343 \f
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Reset per-insn inheritance and override bookkeeping.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the set of hard regs referenced by this insn: regs live
     throughout it, regs dead or set by it, and the hard regs backing
     any pseudos in those sets.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Anything that is not a spill reg usable by this insn is unavailable.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6399
#ifdef SECONDARY_MEMORY_NEEDED
/* If X is not a subreg, return it unmodified.  If it is a subreg,
   look up whether we made a replacement for the SUBREG_REG.  Return
   either the replacement or the SUBREG_REG.  */

static rtx
replaced_subreg (rtx x)
{
  return (GET_CODE (x) == SUBREG
	  ? find_replacement (&SUBREG_REG (x))
	  : x);
}
#endif
6413
/* Compute the offset to pass to subreg_regno_offset, for a pseudo of
   mode OUTERMODE that is available in a hard reg of mode INNERMODE.
   SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
   otherwise it is NULL.  Return the byte offset into the INNERMODE
   hard reg at which the OUTERMODE value starts.  */

static int
compute_reload_subreg_offset (machine_mode outermode,
			      rtx subreg,
			      machine_mode innermode)
{
  int outer_offset;
  machine_mode middlemode;

  /* No subreg involved: just the lowpart of the hard reg.  */
  if (!subreg)
    return subreg_lowpart_offset (outermode, innermode);

  outer_offset = SUBREG_BYTE (subreg);
  middlemode = GET_MODE (SUBREG_REG (subreg));

  /* If SUBREG is paradoxical then return the normal lowpart offset
     for OUTERMODE and INNERMODE.  Our caller has already checked
     that OUTERMODE fits in INNERMODE.  */
  if (outer_offset == 0
      && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
    return subreg_lowpart_offset (outermode, innermode);

  /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
     plus the normal lowpart offset for MIDDLEMODE and INNERMODE.  */
  return outer_offset + subreg_lowpart_offset (middlemode, innermode);
}
6444
6445 /* Assign hard reg targets for the pseudo-registers we must reload
6446 into hard regs for this insn.
6447 Also output the instructions to copy them in and out of the hard regs.
6448
6449 For machines with register classes, we are responsible for
6450 finding a reload reg in the proper class. */
6451
6452 static void
6453 choose_reload_regs (struct insn_chain *chain)
6454 {
6455 rtx_insn *insn = chain->insn;
6456 int i, j;
6457 unsigned int max_group_size = 1;
6458 enum reg_class group_class = NO_REGS;
6459 int pass, win, inheritance;
6460
6461 rtx save_reload_reg_rtx[MAX_RELOADS];
6462
6463 /* In order to be certain of getting the registers we need,
6464 we must sort the reloads into order of increasing register class.
6465 Then our grabbing of reload registers will parallel the process
6466 that provided the reload registers.
6467
6468 Also note whether any of the reloads wants a consecutive group of regs.
6469 If so, record the maximum size of the group desired and what
6470 register class contains all the groups needed by this insn. */
6471
6472 for (j = 0; j < n_reloads; j++)
6473 {
6474 reload_order[j] = j;
6475 if (rld[j].reg_rtx != NULL_RTX)
6476 {
6477 gcc_assert (REG_P (rld[j].reg_rtx)
6478 && HARD_REGISTER_P (rld[j].reg_rtx));
6479 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6480 }
6481 else
6482 reload_spill_index[j] = -1;
6483
6484 if (rld[j].nregs > 1)
6485 {
6486 max_group_size = MAX (rld[j].nregs, max_group_size);
6487 group_class
6488 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6489 }
6490
6491 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6492 }
6493
6494 if (n_reloads > 1)
6495 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6496
6497 /* If -O, try first with inheritance, then turning it off.
6498 If not -O, don't do inheritance.
6499 Using inheritance when not optimizing leads to paradoxes
6500 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6501 because one side of the comparison might be inherited. */
6502 win = 0;
6503 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6504 {
6505 choose_reload_regs_init (chain, save_reload_reg_rtx);
6506
6507 /* Process the reloads in order of preference just found.
6508 Beyond this point, subregs can be found in reload_reg_rtx.
6509
6510 This used to look for an existing reloaded home for all of the
6511 reloads, and only then perform any new reloads. But that could lose
6512 if the reloads were done out of reg-class order because a later
6513 reload with a looser constraint might have an old home in a register
6514 needed by an earlier reload with a tighter constraint.
6515
6516 To solve this, we make two passes over the reloads, in the order
6517 described above. In the first pass we try to inherit a reload
6518 from a previous insn. If there is a later reload that needs a
6519 class that is a proper subset of the class being processed, we must
6520 also allocate a spill register during the first pass.
6521
6522 Then make a second pass over the reloads to allocate any reloads
6523 that haven't been given registers yet. */
6524
6525 for (j = 0; j < n_reloads; j++)
6526 {
6527 int r = reload_order[j];
6528 rtx search_equiv = NULL_RTX;
6529
6530 /* Ignore reloads that got marked inoperative. */
6531 if (rld[r].out == 0 && rld[r].in == 0
6532 && ! rld[r].secondary_p)
6533 continue;
6534
6535 /* If find_reloads chose to use reload_in or reload_out as a reload
6536 register, we don't need to chose one. Otherwise, try even if it
6537 found one since we might save an insn if we find the value lying
6538 around.
6539 Try also when reload_in is a pseudo without a hard reg. */
6540 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6541 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6542 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6543 && !MEM_P (rld[r].in)
6544 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6545 continue;
6546
6547 #if 0 /* No longer needed for correct operation.
6548 It might give better code, or might not; worth an experiment? */
6549 /* If this is an optional reload, we can't inherit from earlier insns
6550 until we are sure that any non-optional reloads have been allocated.
6551 The following code takes advantage of the fact that optional reloads
6552 are at the end of reload_order. */
6553 if (rld[r].optional != 0)
6554 for (i = 0; i < j; i++)
6555 if ((rld[reload_order[i]].out != 0
6556 || rld[reload_order[i]].in != 0
6557 || rld[reload_order[i]].secondary_p)
6558 && ! rld[reload_order[i]].optional
6559 && rld[reload_order[i]].reg_rtx == 0)
6560 allocate_reload_reg (chain, reload_order[i], 0);
6561 #endif
6562
6563 /* First see if this pseudo is already available as reloaded
6564 for a previous insn. We cannot try to inherit for reloads
6565 that are smaller than the maximum number of registers needed
6566 for groups unless the register we would allocate cannot be used
6567 for the groups.
6568
6569 We could check here to see if this is a secondary reload for
6570 an object that is already in a register of the desired class.
6571 This would avoid the need for the secondary reload register.
6572 But this is complex because we can't easily determine what
6573 objects might want to be loaded via this reload. So let a
6574 register be allocated here. In `emit_reload_insns' we suppress
6575 one of the loads in the case described above. */
6576
6577 if (inheritance)
6578 {
6579 int byte = 0;
6580 int regno = -1;
6581 machine_mode mode = VOIDmode;
6582 rtx subreg = NULL_RTX;
6583
6584 if (rld[r].in == 0)
6585 ;
6586 else if (REG_P (rld[r].in))
6587 {
6588 regno = REGNO (rld[r].in);
6589 mode = GET_MODE (rld[r].in);
6590 }
6591 else if (REG_P (rld[r].in_reg))
6592 {
6593 regno = REGNO (rld[r].in_reg);
6594 mode = GET_MODE (rld[r].in_reg);
6595 }
6596 else if (GET_CODE (rld[r].in_reg) == SUBREG
6597 && REG_P (SUBREG_REG (rld[r].in_reg)))
6598 {
6599 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6600 if (regno < FIRST_PSEUDO_REGISTER)
6601 regno = subreg_regno (rld[r].in_reg);
6602 else
6603 {
6604 subreg = rld[r].in_reg;
6605 byte = SUBREG_BYTE (subreg);
6606 }
6607 mode = GET_MODE (rld[r].in_reg);
6608 }
6609 #if AUTO_INC_DEC
6610 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6611 && REG_P (XEXP (rld[r].in_reg, 0)))
6612 {
6613 regno = REGNO (XEXP (rld[r].in_reg, 0));
6614 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6615 rld[r].out = rld[r].in;
6616 }
6617 #endif
6618 #if 0
6619 /* This won't work, since REGNO can be a pseudo reg number.
6620 Also, it takes much more hair to keep track of all the things
6621 that can invalidate an inherited reload of part of a pseudoreg. */
6622 else if (GET_CODE (rld[r].in) == SUBREG
6623 && REG_P (SUBREG_REG (rld[r].in)))
6624 regno = subreg_regno (rld[r].in);
6625 #endif
6626
6627 if (regno >= 0
6628 && reg_last_reload_reg[regno] != 0
6629 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6630 >= GET_MODE_SIZE (mode) + byte)
6631 #ifdef CANNOT_CHANGE_MODE_CLASS
6632 /* Verify that the register it's in can be used in
6633 mode MODE. */
6634 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6635 GET_MODE (reg_last_reload_reg[regno]),
6636 mode)
6637 #endif
6638 )
6639 {
6640 enum reg_class rclass = rld[r].rclass, last_class;
6641 rtx last_reg = reg_last_reload_reg[regno];
6642
6643 i = REGNO (last_reg);
6644 byte = compute_reload_subreg_offset (mode,
6645 subreg,
6646 GET_MODE (last_reg));
6647 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6648 last_class = REGNO_REG_CLASS (i);
6649
6650 if (reg_reloaded_contents[i] == regno
6651 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6652 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6653 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6654 /* Even if we can't use this register as a reload
6655 register, we might use it for reload_override_in,
6656 if copying it to the desired class is cheap
6657 enough. */
6658 || ((register_move_cost (mode, last_class, rclass)
6659 < memory_move_cost (mode, rclass, true))
6660 && (secondary_reload_class (1, rclass, mode,
6661 last_reg)
6662 == NO_REGS)
6663 #ifdef SECONDARY_MEMORY_NEEDED
6664 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6665 mode)
6666 #endif
6667 ))
6668
6669 && (rld[r].nregs == max_group_size
6670 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6671 i))
6672 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6673 rld[r].when_needed, rld[r].in,
6674 const0_rtx, r, 1))
6675 {
6676 /* If a group is needed, verify that all the subsequent
6677 registers still have their values intact. */
6678 int nr = hard_regno_nregs[i][rld[r].mode];
6679 int k;
6680
6681 for (k = 1; k < nr; k++)
6682 if (reg_reloaded_contents[i + k] != regno
6683 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6684 break;
6685
6686 if (k == nr)
6687 {
6688 int i1;
6689 int bad_for_class;
6690
6691 last_reg = (GET_MODE (last_reg) == mode
6692 ? last_reg : gen_rtx_REG (mode, i));
6693
6694 bad_for_class = 0;
6695 for (k = 0; k < nr; k++)
6696 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6697 i+k);
6698
6699 /* We found a register that contains the
6700 value we need. If this register is the
6701 same as an `earlyclobber' operand of the
6702 current insn, just mark it as a place to
6703 reload from since we can't use it as the
6704 reload register itself. */
6705
6706 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6707 if (reg_overlap_mentioned_for_reload_p
6708 (reg_last_reload_reg[regno],
6709 reload_earlyclobbers[i1]))
6710 break;
6711
6712 if (i1 != n_earlyclobbers
6713 || ! (free_for_value_p (i, rld[r].mode,
6714 rld[r].opnum,
6715 rld[r].when_needed, rld[r].in,
6716 rld[r].out, r, 1))
6717 /* Don't use it if we'd clobber a pseudo reg. */
6718 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6719 && rld[r].out
6720 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6721 /* Don't clobber the frame pointer. */
6722 || (i == HARD_FRAME_POINTER_REGNUM
6723 && frame_pointer_needed
6724 && rld[r].out)
6725 /* Don't really use the inherited spill reg
6726 if we need it wider than we've got it. */
6727 || (GET_MODE_SIZE (rld[r].mode)
6728 > GET_MODE_SIZE (mode))
6729 || bad_for_class
6730
6731 /* If find_reloads chose reload_out as reload
6732 register, stay with it - that leaves the
6733 inherited register for subsequent reloads. */
6734 || (rld[r].out && rld[r].reg_rtx
6735 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6736 {
6737 if (! rld[r].optional)
6738 {
6739 reload_override_in[r] = last_reg;
6740 reload_inheritance_insn[r]
6741 = reg_reloaded_insn[i];
6742 }
6743 }
6744 else
6745 {
6746 int k;
6747 /* We can use this as a reload reg. */
6748 /* Mark the register as in use for this part of
6749 the insn. */
6750 mark_reload_reg_in_use (i,
6751 rld[r].opnum,
6752 rld[r].when_needed,
6753 rld[r].mode);
6754 rld[r].reg_rtx = last_reg;
6755 reload_inherited[r] = 1;
6756 reload_inheritance_insn[r]
6757 = reg_reloaded_insn[i];
6758 reload_spill_index[r] = i;
6759 for (k = 0; k < nr; k++)
6760 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6761 i + k);
6762 }
6763 }
6764 }
6765 }
6766 }
6767
6768 /* Here's another way to see if the value is already lying around. */
6769 if (inheritance
6770 && rld[r].in != 0
6771 && ! reload_inherited[r]
6772 && rld[r].out == 0
6773 && (CONSTANT_P (rld[r].in)
6774 || GET_CODE (rld[r].in) == PLUS
6775 || REG_P (rld[r].in)
6776 || MEM_P (rld[r].in))
6777 && (rld[r].nregs == max_group_size
6778 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6779 search_equiv = rld[r].in;
6780
6781 if (search_equiv)
6782 {
6783 rtx equiv
6784 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6785 -1, NULL, 0, rld[r].mode);
6786 int regno = 0;
6787
6788 if (equiv != 0)
6789 {
6790 if (REG_P (equiv))
6791 regno = REGNO (equiv);
6792 else
6793 {
6794 /* This must be a SUBREG of a hard register.
6795 Make a new REG since this might be used in an
6796 address and not all machines support SUBREGs
6797 there. */
6798 gcc_assert (GET_CODE (equiv) == SUBREG);
6799 regno = subreg_regno (equiv);
6800 equiv = gen_rtx_REG (rld[r].mode, regno);
6801 /* If we choose EQUIV as the reload register, but the
6802 loop below decides to cancel the inheritance, we'll
6803 end up reloading EQUIV in rld[r].mode, not the mode
6804 it had originally. That isn't safe when EQUIV isn't
6805 available as a spill register since its value might
6806 still be live at this point. */
6807 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6808 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6809 equiv = 0;
6810 }
6811 }
6812
6813 /* If we found a spill reg, reject it unless it is free
6814 and of the desired class. */
6815 if (equiv != 0)
6816 {
6817 int regs_used = 0;
6818 int bad_for_class = 0;
6819 int max_regno = regno + rld[r].nregs;
6820
6821 for (i = regno; i < max_regno; i++)
6822 {
6823 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6824 i);
6825 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6826 i);
6827 }
6828
6829 if ((regs_used
6830 && ! free_for_value_p (regno, rld[r].mode,
6831 rld[r].opnum, rld[r].when_needed,
6832 rld[r].in, rld[r].out, r, 1))
6833 || bad_for_class)
6834 equiv = 0;
6835 }
6836
6837 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6838 equiv = 0;
6839
6840 /* We found a register that contains the value we need.
6841 If this register is the same as an `earlyclobber' operand
6842 of the current insn, just mark it as a place to reload from
6843 since we can't use it as the reload register itself. */
6844
6845 if (equiv != 0)
6846 for (i = 0; i < n_earlyclobbers; i++)
6847 if (reg_overlap_mentioned_for_reload_p (equiv,
6848 reload_earlyclobbers[i]))
6849 {
6850 if (! rld[r].optional)
6851 reload_override_in[r] = equiv;
6852 equiv = 0;
6853 break;
6854 }
6855
6856 /* If the equiv register we have found is explicitly clobbered
6857 in the current insn, it depends on the reload type if we
6858 can use it, use it for reload_override_in, or not at all.
6859 In particular, we then can't use EQUIV for a
6860 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6861
6862 if (equiv != 0)
6863 {
6864 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6865 switch (rld[r].when_needed)
6866 {
6867 case RELOAD_FOR_OTHER_ADDRESS:
6868 case RELOAD_FOR_INPADDR_ADDRESS:
6869 case RELOAD_FOR_INPUT_ADDRESS:
6870 case RELOAD_FOR_OPADDR_ADDR:
6871 break;
6872 case RELOAD_OTHER:
6873 case RELOAD_FOR_INPUT:
6874 case RELOAD_FOR_OPERAND_ADDRESS:
6875 if (! rld[r].optional)
6876 reload_override_in[r] = equiv;
6877 /* Fall through. */
6878 default:
6879 equiv = 0;
6880 break;
6881 }
6882 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6883 switch (rld[r].when_needed)
6884 {
6885 case RELOAD_FOR_OTHER_ADDRESS:
6886 case RELOAD_FOR_INPADDR_ADDRESS:
6887 case RELOAD_FOR_INPUT_ADDRESS:
6888 case RELOAD_FOR_OPADDR_ADDR:
6889 case RELOAD_FOR_OPERAND_ADDRESS:
6890 case RELOAD_FOR_INPUT:
6891 break;
6892 case RELOAD_OTHER:
6893 if (! rld[r].optional)
6894 reload_override_in[r] = equiv;
6895 /* Fall through. */
6896 default:
6897 equiv = 0;
6898 break;
6899 }
6900 }
6901
6902 /* If we found an equivalent reg, say no code need be generated
6903 to load it, and use it as our reload reg. */
6904 if (equiv != 0
6905 && (regno != HARD_FRAME_POINTER_REGNUM
6906 || !frame_pointer_needed))
6907 {
6908 int nr = hard_regno_nregs[regno][rld[r].mode];
6909 int k;
6910 rld[r].reg_rtx = equiv;
6911 reload_spill_index[r] = regno;
6912 reload_inherited[r] = 1;
6913
6914 /* If reg_reloaded_valid is not set for this register,
6915 there might be a stale spill_reg_store lying around.
6916 We must clear it, since otherwise emit_reload_insns
6917 might delete the store. */
6918 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6919 spill_reg_store[regno] = NULL;
6920 /* If any of the hard registers in EQUIV are spill
6921 registers, mark them as in use for this insn. */
6922 for (k = 0; k < nr; k++)
6923 {
6924 i = spill_reg_order[regno + k];
6925 if (i >= 0)
6926 {
6927 mark_reload_reg_in_use (regno, rld[r].opnum,
6928 rld[r].when_needed,
6929 rld[r].mode);
6930 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6931 regno + k);
6932 }
6933 }
6934 }
6935 }
6936
6937 /* If we found a register to use already, or if this is an optional
6938 reload, we are done. */
6939 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6940 continue;
6941
6942 #if 0
6943 /* No longer needed for correct operation. Might or might
6944 not give better code on the average. Want to experiment? */
6945
6946 /* See if there is a later reload that has a class different from our
6947 class that intersects our class or that requires less register
6948 than our reload. If so, we must allocate a register to this
6949 reload now, since that reload might inherit a previous reload
6950 and take the only available register in our class. Don't do this
6951 for optional reloads since they will force all previous reloads
6952 to be allocated. Also don't do this for reloads that have been
6953 turned off. */
6954
6955 for (i = j + 1; i < n_reloads; i++)
6956 {
6957 int s = reload_order[i];
6958
6959 if ((rld[s].in == 0 && rld[s].out == 0
6960 && ! rld[s].secondary_p)
6961 || rld[s].optional)
6962 continue;
6963
6964 if ((rld[s].rclass != rld[r].rclass
6965 && reg_classes_intersect_p (rld[r].rclass,
6966 rld[s].rclass))
6967 || rld[s].nregs < rld[r].nregs)
6968 break;
6969 }
6970
6971 if (i == n_reloads)
6972 continue;
6973
6974 allocate_reload_reg (chain, r, j == n_reloads - 1);
6975 #endif
6976 }
6977
6978 /* Now allocate reload registers for anything non-optional that
6979 didn't get one yet. */
6980 for (j = 0; j < n_reloads; j++)
6981 {
6982 int r = reload_order[j];
6983
6984 /* Ignore reloads that got marked inoperative. */
6985 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6986 continue;
6987
6988 /* Skip reloads that already have a register allocated or are
6989 optional. */
6990 if (rld[r].reg_rtx != 0 || rld[r].optional)
6991 continue;
6992
6993 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
6994 break;
6995 }
6996
6997 /* If that loop got all the way, we have won. */
6998 if (j == n_reloads)
6999 {
7000 win = 1;
7001 break;
7002 }
7003
7004 /* Loop around and try without any inheritance. */
7005 }
7006
7007 if (! win)
7008 {
7009 /* First undo everything done by the failed attempt
7010 to allocate with inheritance. */
7011 choose_reload_regs_init (chain, save_reload_reg_rtx);
7012
7013 /* Some sanity tests to verify that the reloads found in the first
7014 pass are identical to the ones we have now. */
7015 gcc_assert (chain->n_reloads == n_reloads);
7016
7017 for (i = 0; i < n_reloads; i++)
7018 {
7019 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
7020 continue;
7021 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
7022 for (j = 0; j < n_spills; j++)
7023 if (spill_regs[j] == chain->rld[i].regno)
7024 if (! set_reload_reg (j, i))
7025 failed_reload (chain->insn, i);
7026 }
7027 }
7028
7029 /* If we thought we could inherit a reload, because it seemed that
7030 nothing else wanted the same reload register earlier in the insn,
7031 verify that assumption, now that all reloads have been assigned.
7032 Likewise for reloads where reload_override_in has been set. */
7033
7034 /* If doing expensive optimizations, do one preliminary pass that doesn't
7035 cancel any inheritance, but removes reloads that have been needed only
7036 for reloads that we know can be inherited. */
7037 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7038 {
7039 for (j = 0; j < n_reloads; j++)
7040 {
7041 int r = reload_order[j];
7042 rtx check_reg;
7043 #ifdef SECONDARY_MEMORY_NEEDED
7044 rtx tem;
7045 #endif
7046 if (reload_inherited[r] && rld[r].reg_rtx)
7047 check_reg = rld[r].reg_rtx;
7048 else if (reload_override_in[r]
7049 && (REG_P (reload_override_in[r])
7050 || GET_CODE (reload_override_in[r]) == SUBREG))
7051 check_reg = reload_override_in[r];
7052 else
7053 continue;
7054 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7055 rld[r].opnum, rld[r].when_needed, rld[r].in,
7056 (reload_inherited[r]
7057 ? rld[r].out : const0_rtx),
7058 r, 1))
7059 {
7060 if (pass)
7061 continue;
7062 reload_inherited[r] = 0;
7063 reload_override_in[r] = 0;
7064 }
7065 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7066 reload_override_in, then we do not need its related
7067 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7068 likewise for other reload types.
7069 We handle this by removing a reload when its only replacement
7070 is mentioned in reload_in of the reload we are going to inherit.
7071 A special case are auto_inc expressions; even if the input is
7072 inherited, we still need the address for the output. We can
7073 recognize them because they have RELOAD_OUT set to RELOAD_IN.
7074 If we succeeded removing some reload and we are doing a preliminary
7075 pass just to remove such reloads, make another pass, since the
7076 removal of one reload might allow us to inherit another one. */
7077 else if (rld[r].in
7078 && rld[r].out != rld[r].in
7079 && remove_address_replacements (rld[r].in))
7080 {
7081 if (pass)
7082 pass = 2;
7083 }
7084 #ifdef SECONDARY_MEMORY_NEEDED
7085 /* If we needed a memory location for the reload, we also have to
7086 remove its related reloads. */
7087 else if (rld[r].in
7088 && rld[r].out != rld[r].in
7089 && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7090 && REGNO (tem) < FIRST_PSEUDO_REGISTER
7091 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7092 rld[r].rclass, rld[r].inmode)
7093 && remove_address_replacements
7094 (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7095 rld[r].when_needed)))
7096 {
7097 if (pass)
7098 pass = 2;
7099 }
7100 #endif
7101 }
7102 }
7103
7104 /* Now that reload_override_in is known valid,
7105 actually override reload_in. */
7106 for (j = 0; j < n_reloads; j++)
7107 if (reload_override_in[j])
7108 rld[j].in = reload_override_in[j];
7109
7110 /* If this reload won't be done because it has been canceled or is
7111 optional and not inherited, clear reload_reg_rtx so other
7112 routines (such as subst_reloads) don't get confused. */
7113 for (j = 0; j < n_reloads; j++)
7114 if (rld[j].reg_rtx != 0
7115 && ((rld[j].optional && ! reload_inherited[j])
7116 || (rld[j].in == 0 && rld[j].out == 0
7117 && ! rld[j].secondary_p)))
7118 {
7119 int regno = true_regnum (rld[j].reg_rtx);
7120
7121 if (spill_reg_order[regno] >= 0)
7122 clear_reload_reg_in_use (regno, rld[j].opnum,
7123 rld[j].when_needed, rld[j].mode);
7124 rld[j].reg_rtx = 0;
7125 reload_spill_index[j] = -1;
7126 }
7127
7128 /* Record which pseudos and which spill regs have output reloads. */
7129 for (j = 0; j < n_reloads; j++)
7130 {
7131 int r = reload_order[j];
7132
7133 i = reload_spill_index[r];
7134
7135 /* I is nonneg if this reload uses a register.
7136 If rld[r].reg_rtx is 0, this is an optional reload
7137 that we opted to ignore. */
7138 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7139 && rld[r].reg_rtx != 0)
7140 {
7141 int nregno = REGNO (rld[r].out_reg);
7142 int nr = 1;
7143
7144 if (nregno < FIRST_PSEUDO_REGISTER)
7145 nr = hard_regno_nregs[nregno][rld[r].mode];
7146
7147 while (--nr >= 0)
7148 SET_REGNO_REG_SET (&reg_has_output_reload,
7149 nregno + nr);
7150
7151 if (i >= 0)
7152 add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7153
7154 gcc_assert (rld[r].when_needed == RELOAD_OTHER
7155 || rld[r].when_needed == RELOAD_FOR_OUTPUT
7156 || rld[r].when_needed == RELOAD_FOR_INSN);
7157 }
7158 }
7159 }
7160
7161 /* Deallocate the reload register for reload R. This is called from
7162 remove_address_replacements. */
7163
7164 void
7165 deallocate_reload_reg (int r)
7166 {
7167 int regno;
7168
7169 if (! rld[r].reg_rtx)
7170 return;
7171 regno = true_regnum (rld[r].reg_rtx);
7172 rld[r].reg_rtx = 0;
7173 if (spill_reg_order[regno] >= 0)
7174 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7175 rld[r].mode);
7176 reload_spill_index[r] = -1;
7177 }
7178 \f
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds the head of an insn sequence being built for one kind of
   reload (emit_input_reload_insns switches on rld[r].when_needed to pick
   which sequence to extend); the per-operand arrays are indexed by the
   reload's operand number.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *other_input_address_reload_insns = 0;
static rtx_insn *other_input_reload_insns = 0;
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
static rtx_insn *operand_reload_insns = 0;
static rtx_insn *other_operand_reload_insns = 0;
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  Indexed by hard regno.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose reloaded contents died in the current insn
   (presumably maintained by the emit/reload bookkeeping below this
   chunk -- the uses are not visible here).  */
static HARD_REG_SET reg_reloaded_died;
7197
7198 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7199 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7200 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7201 adjusted register, and return true. Otherwise, return false. */
7202 static bool
7203 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7204 enum reg_class new_class,
7205 machine_mode new_mode)
7206
7207 {
7208 rtx reg;
7209
7210 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7211 {
7212 unsigned regno = REGNO (reg);
7213
7214 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7215 continue;
7216 if (GET_MODE (reg) != new_mode)
7217 {
7218 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7219 continue;
7220 if (hard_regno_nregs[regno][new_mode]
7221 > hard_regno_nregs[regno][GET_MODE (reg)])
7222 continue;
7223 reg = reload_adjust_reg_for_mode (reg, new_mode);
7224 }
7225 *reload_reg = reg;
7226 return true;
7227 }
7228 return false;
7229 }
7230
7231 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7232 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7233 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7234 adjusted register, and return true. Otherwise, return false. */
7235 static bool
7236 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7237 enum insn_code icode)
7238
7239 {
7240 enum reg_class new_class = scratch_reload_class (icode);
7241 machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7242
7243 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7244 new_class, new_mode);
7245 }
7246
7247 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7248 has the number J. OLD contains the value to be used as input. */
7249
7250 static void
7251 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7252 rtx old, int j)
7253 {
7254 rtx_insn *insn = chain->insn;
7255 rtx reloadreg;
7256 rtx oldequiv_reg = 0;
7257 rtx oldequiv = 0;
7258 int special = 0;
7259 machine_mode mode;
7260 rtx_insn **where;
7261
7262 /* delete_output_reload is only invoked properly if old contains
7263 the original pseudo register. Since this is replaced with a
7264 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7265 find the pseudo in RELOAD_IN_REG. This is also used to
7266 determine whether a secondary reload is needed. */
7267 if (reload_override_in[j]
7268 && (REG_P (rl->in_reg)
7269 || (GET_CODE (rl->in_reg) == SUBREG
7270 && REG_P (SUBREG_REG (rl->in_reg)))))
7271 {
7272 oldequiv = old;
7273 old = rl->in_reg;
7274 }
7275 if (oldequiv == 0)
7276 oldequiv = old;
7277 else if (REG_P (oldequiv))
7278 oldequiv_reg = oldequiv;
7279 else if (GET_CODE (oldequiv) == SUBREG)
7280 oldequiv_reg = SUBREG_REG (oldequiv);
7281
7282 reloadreg = reload_reg_rtx_for_input[j];
7283 mode = GET_MODE (reloadreg);
7284
7285 /* If we are reloading from a register that was recently stored in
7286 with an output-reload, see if we can prove there was
7287 actually no need to store the old value in it. */
7288
7289 if (optimize && REG_P (oldequiv)
7290 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7291 && spill_reg_store[REGNO (oldequiv)]
7292 && REG_P (old)
7293 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7294 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7295 rl->out_reg)))
7296 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7297
7298 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7299 OLDEQUIV. */
7300
7301 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7302 oldequiv = SUBREG_REG (oldequiv);
7303 if (GET_MODE (oldequiv) != VOIDmode
7304 && mode != GET_MODE (oldequiv))
7305 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7306
7307 /* Switch to the right place to emit the reload insns. */
7308 switch (rl->when_needed)
7309 {
7310 case RELOAD_OTHER:
7311 where = &other_input_reload_insns;
7312 break;
7313 case RELOAD_FOR_INPUT:
7314 where = &input_reload_insns[rl->opnum];
7315 break;
7316 case RELOAD_FOR_INPUT_ADDRESS:
7317 where = &input_address_reload_insns[rl->opnum];
7318 break;
7319 case RELOAD_FOR_INPADDR_ADDRESS:
7320 where = &inpaddr_address_reload_insns[rl->opnum];
7321 break;
7322 case RELOAD_FOR_OUTPUT_ADDRESS:
7323 where = &output_address_reload_insns[rl->opnum];
7324 break;
7325 case RELOAD_FOR_OUTADDR_ADDRESS:
7326 where = &outaddr_address_reload_insns[rl->opnum];
7327 break;
7328 case RELOAD_FOR_OPERAND_ADDRESS:
7329 where = &operand_reload_insns;
7330 break;
7331 case RELOAD_FOR_OPADDR_ADDR:
7332 where = &other_operand_reload_insns;
7333 break;
7334 case RELOAD_FOR_OTHER_ADDRESS:
7335 where = &other_input_address_reload_insns;
7336 break;
7337 default:
7338 gcc_unreachable ();
7339 }
7340
7341 push_to_sequence (*where);
7342
7343 /* Auto-increment addresses must be reloaded in a special way. */
7344 if (rl->out && ! rl->out_reg)
7345 {
7346 /* We are not going to bother supporting the case where a
7347 incremented register can't be copied directly from
7348 OLDEQUIV since this seems highly unlikely. */
7349 gcc_assert (rl->secondary_in_reload < 0);
7350
7351 if (reload_inherited[j])
7352 oldequiv = reloadreg;
7353
7354 old = XEXP (rl->in_reg, 0);
7355
7356 /* Prevent normal processing of this reload. */
7357 special = 1;
7358 /* Output a special code sequence for this case. */
7359 inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7360 }
7361
7362 /* If we are reloading a pseudo-register that was set by the previous
7363 insn, see if we can get rid of that pseudo-register entirely
7364 by redirecting the previous insn into our reload register. */
7365
7366 else if (optimize && REG_P (old)
7367 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7368 && dead_or_set_p (insn, old)
7369 /* This is unsafe if some other reload
7370 uses the same reg first. */
7371 && ! conflicts_with_override (reloadreg)
7372 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7373 rl->when_needed, old, rl->out, j, 0))
7374 {
7375 rtx_insn *temp = PREV_INSN (insn);
7376 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7377 temp = PREV_INSN (temp);
7378 if (temp
7379 && NONJUMP_INSN_P (temp)
7380 && GET_CODE (PATTERN (temp)) == SET
7381 && SET_DEST (PATTERN (temp)) == old
7382 /* Make sure we can access insn_operand_constraint. */
7383 && asm_noperands (PATTERN (temp)) < 0
7384 /* This is unsafe if operand occurs more than once in current
7385 insn. Perhaps some occurrences aren't reloaded. */
7386 && count_occurrences (PATTERN (insn), old, 0) == 1)
7387 {
7388 rtx old = SET_DEST (PATTERN (temp));
7389 /* Store into the reload register instead of the pseudo. */
7390 SET_DEST (PATTERN (temp)) = reloadreg;
7391
7392 /* Verify that resulting insn is valid.
7393
7394 Note that we have replaced the destination of TEMP with
7395 RELOADREG. If TEMP references RELOADREG within an
7396 autoincrement addressing mode, then the resulting insn
7397 is ill-formed and we must reject this optimization. */
7398 extract_insn (temp);
7399 if (constrain_operands (1, get_enabled_alternatives (temp))
7400 #if AUTO_INC_DEC
7401 && ! find_reg_note (temp, REG_INC, reloadreg)
7402 #endif
7403 )
7404 {
7405 /* If the previous insn is an output reload, the source is
7406 a reload register, and its spill_reg_store entry will
7407 contain the previous destination. This is now
7408 invalid. */
7409 if (REG_P (SET_SRC (PATTERN (temp)))
7410 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7411 {
7412 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7413 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7414 }
7415
7416 /* If these are the only uses of the pseudo reg,
7417 pretend for GDB it lives in the reload reg we used. */
7418 if (REG_N_DEATHS (REGNO (old)) == 1
7419 && REG_N_SETS (REGNO (old)) == 1)
7420 {
7421 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7422 if (ira_conflicts_p)
7423 /* Inform IRA about the change. */
7424 ira_mark_allocation_change (REGNO (old));
7425 alter_reg (REGNO (old), -1, false);
7426 }
7427 special = 1;
7428
7429 /* Adjust any debug insns between temp and insn. */
7430 while ((temp = NEXT_INSN (temp)) != insn)
7431 if (DEBUG_INSN_P (temp))
7432 replace_rtx (PATTERN (temp), old, reloadreg);
7433 else
7434 gcc_assert (NOTE_P (temp));
7435 }
7436 else
7437 {
7438 SET_DEST (PATTERN (temp)) = old;
7439 }
7440 }
7441 }
7442
7443 /* We can't do that, so output an insn to load RELOADREG. */
7444
7445 /* If we have a secondary reload, pick up the secondary register
7446 and icode, if any. If OLDEQUIV and OLD are different or
7447 if this is an in-out reload, recompute whether or not we
7448 still need a secondary register and what the icode should
7449 be. If we still need a secondary register and the class or
7450 icode is different, go back to reloading from OLD if using
7451 OLDEQUIV means that we got the wrong type of register. We
7452 cannot have different class or icode due to an in-out reload
7453 because we don't make such reloads when both the input and
7454 output need secondary reload registers. */
7455
7456 if (! special && rl->secondary_in_reload >= 0)
7457 {
7458 rtx second_reload_reg = 0;
7459 rtx third_reload_reg = 0;
7460 int secondary_reload = rl->secondary_in_reload;
7461 rtx real_oldequiv = oldequiv;
7462 rtx real_old = old;
7463 rtx tmp;
7464 enum insn_code icode;
7465 enum insn_code tertiary_icode = CODE_FOR_nothing;
7466
7467 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7468 and similarly for OLD.
7469 See comments in get_secondary_reload in reload.c. */
7470 /* If it is a pseudo that cannot be replaced with its
7471 equivalent MEM, we must fall back to reload_in, which
7472 will have all the necessary substitutions registered.
7473 Likewise for a pseudo that can't be replaced with its
7474 equivalent constant.
7475
7476 Take extra care for subregs of such pseudos. Note that
7477 we cannot use reg_equiv_mem in this case because it is
7478 not in the right mode. */
7479
7480 tmp = oldequiv;
7481 if (GET_CODE (tmp) == SUBREG)
7482 tmp = SUBREG_REG (tmp);
7483 if (REG_P (tmp)
7484 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7485 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7486 || reg_equiv_constant (REGNO (tmp)) != 0))
7487 {
7488 if (! reg_equiv_mem (REGNO (tmp))
7489 || num_not_at_initial_offset
7490 || GET_CODE (oldequiv) == SUBREG)
7491 real_oldequiv = rl->in;
7492 else
7493 real_oldequiv = reg_equiv_mem (REGNO (tmp));
7494 }
7495
7496 tmp = old;
7497 if (GET_CODE (tmp) == SUBREG)
7498 tmp = SUBREG_REG (tmp);
7499 if (REG_P (tmp)
7500 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7501 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7502 || reg_equiv_constant (REGNO (tmp)) != 0))
7503 {
7504 if (! reg_equiv_mem (REGNO (tmp))
7505 || num_not_at_initial_offset
7506 || GET_CODE (old) == SUBREG)
7507 real_old = rl->in;
7508 else
7509 real_old = reg_equiv_mem (REGNO (tmp));
7510 }
7511
7512 second_reload_reg = rld[secondary_reload].reg_rtx;
7513 if (rld[secondary_reload].secondary_in_reload >= 0)
7514 {
7515 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7516
7517 third_reload_reg = rld[tertiary_reload].reg_rtx;
7518 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7519 /* We'd have to add more code for quartary reloads. */
7520 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7521 }
7522 icode = rl->secondary_in_icode;
7523
7524 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7525 || (rl->in != 0 && rl->out != 0))
7526 {
7527 secondary_reload_info sri, sri2;
7528 enum reg_class new_class, new_t_class;
7529
7530 sri.icode = CODE_FOR_nothing;
7531 sri.prev_sri = NULL;
7532 new_class
7533 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7534 rl->rclass, mode,
7535 &sri);
7536
7537 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7538 second_reload_reg = 0;
7539 else if (new_class == NO_REGS)
7540 {
7541 if (reload_adjust_reg_for_icode (&second_reload_reg,
7542 third_reload_reg,
7543 (enum insn_code) sri.icode))
7544 {
7545 icode = (enum insn_code) sri.icode;
7546 third_reload_reg = 0;
7547 }
7548 else
7549 {
7550 oldequiv = old;
7551 real_oldequiv = real_old;
7552 }
7553 }
7554 else if (sri.icode != CODE_FOR_nothing)
7555 /* We currently lack a way to express this in reloads. */
7556 gcc_unreachable ();
7557 else
7558 {
7559 sri2.icode = CODE_FOR_nothing;
7560 sri2.prev_sri = &sri;
7561 new_t_class
7562 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7563 new_class, mode,
7564 &sri);
7565 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7566 {
7567 if (reload_adjust_reg_for_temp (&second_reload_reg,
7568 third_reload_reg,
7569 new_class, mode))
7570 {
7571 third_reload_reg = 0;
7572 tertiary_icode = (enum insn_code) sri2.icode;
7573 }
7574 else
7575 {
7576 oldequiv = old;
7577 real_oldequiv = real_old;
7578 }
7579 }
7580 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7581 {
7582 rtx intermediate = second_reload_reg;
7583
7584 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7585 new_class, mode)
7586 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7587 ((enum insn_code)
7588 sri2.icode)))
7589 {
7590 second_reload_reg = intermediate;
7591 tertiary_icode = (enum insn_code) sri2.icode;
7592 }
7593 else
7594 {
7595 oldequiv = old;
7596 real_oldequiv = real_old;
7597 }
7598 }
7599 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7600 {
7601 rtx intermediate = second_reload_reg;
7602
7603 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7604 new_class, mode)
7605 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7606 new_t_class, mode))
7607 {
7608 second_reload_reg = intermediate;
7609 tertiary_icode = (enum insn_code) sri2.icode;
7610 }
7611 else
7612 {
7613 oldequiv = old;
7614 real_oldequiv = real_old;
7615 }
7616 }
7617 else
7618 {
7619 /* This could be handled more intelligently too. */
7620 oldequiv = old;
7621 real_oldequiv = real_old;
7622 }
7623 }
7624 }
7625
7626 /* If we still need a secondary reload register, check
7627 to see if it is being used as a scratch or intermediate
7628 register and generate code appropriately. If we need
7629 a scratch register, use REAL_OLDEQUIV since the form of
7630 the insn may depend on the actual address if it is
7631 a MEM. */
7632
7633 if (second_reload_reg)
7634 {
7635 if (icode != CODE_FOR_nothing)
7636 {
7637 /* We'd have to add extra code to handle this case. */
7638 gcc_assert (!third_reload_reg);
7639
7640 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7641 second_reload_reg));
7642 special = 1;
7643 }
7644 else
7645 {
7646 /* See if we need a scratch register to load the
7647 intermediate register (a tertiary reload). */
7648 if (tertiary_icode != CODE_FOR_nothing)
7649 {
7650 emit_insn ((GEN_FCN (tertiary_icode)
7651 (second_reload_reg, real_oldequiv,
7652 third_reload_reg)));
7653 }
7654 else if (third_reload_reg)
7655 {
7656 gen_reload (third_reload_reg, real_oldequiv,
7657 rl->opnum,
7658 rl->when_needed);
7659 gen_reload (second_reload_reg, third_reload_reg,
7660 rl->opnum,
7661 rl->when_needed);
7662 }
7663 else
7664 gen_reload (second_reload_reg, real_oldequiv,
7665 rl->opnum,
7666 rl->when_needed);
7667
7668 oldequiv = second_reload_reg;
7669 }
7670 }
7671 }
7672
7673 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7674 {
7675 rtx real_oldequiv = oldequiv;
7676
7677 if ((REG_P (oldequiv)
7678 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7679 && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7680 || reg_equiv_constant (REGNO (oldequiv)) != 0))
7681 || (GET_CODE (oldequiv) == SUBREG
7682 && REG_P (SUBREG_REG (oldequiv))
7683 && (REGNO (SUBREG_REG (oldequiv))
7684 >= FIRST_PSEUDO_REGISTER)
7685 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7686 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7687 || (CONSTANT_P (oldequiv)
7688 && (targetm.preferred_reload_class (oldequiv,
7689 REGNO_REG_CLASS (REGNO (reloadreg)))
7690 == NO_REGS)))
7691 real_oldequiv = rl->in;
7692 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7693 rl->when_needed);
7694 }
7695
7696 if (cfun->can_throw_non_call_exceptions)
7697 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7698
7699 /* End this sequence. */
7700 *where = get_insns ();
7701 end_sequence ();
7702
7703 /* Update reload_override_in so that delete_address_reloads_1
7704 can see the actual register usage. */
7705 if (oldequiv_reg)
7706 reload_override_in[j] = oldequiv;
7707 }
7708
/* Generate insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  Emitted insns are collected into the
   per-operand sequence arrays (output_reload_insns etc.) rather than into
   the insn stream directly; emit_reload_insns splices them in later.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Set to 1 when a secondary-reload insn pattern already performed the
     final store, so the generic "last reload insn" must be suppressed.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  /* Accumulate into the sequence appropriate for this reload's timing
     class; RELOAD_OTHER reloads get a fresh sequence of their own.  */
  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* Prefer the pseudo's equivalent MEM as the store target when one
	 exists, since the secondary-reload pattern may depend on the
	 actual address form.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  /* Swap: the original reload reg becomes the source and the
	     secondary reload reg becomes the (intermediate) destination.  */
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      /* The target-supplied pattern does the whole store itself,
		 using RELOADREG as scratch.  */
	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    /* NOTE(review): this deliberately inspects the reloaded INSN,
	       not the emitted insn P, to detect an optional output reload
	       whose source is the reload register itself.  */
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Record the finished sequence for later splicing by
     emit_reload_insns.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  /* Propagate EH region notes so that the new insns keep the original
     insn's exception behavior.  */
  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7914
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Decides the mode to reload in, records the reload
   register in reload_reg_rtx_for_input, emits the input reload insns when
   the value is not already in place, and tries to delete a now-redundant
   earlier output reload.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* If the reload value is a MEM (a spilled pseudo's stack slot), reload
     from the original pseudo in rl->in_reg instead.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly remoded) reload register so later bookkeeping
     in emit_reload_insns can see it.  */
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
8013
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Decides the reload mode, records the reload
   register in reload_reg_rtx_for_output, tries to delete a previous store
   made redundant by this reload, and emits the output reload insns unless
   the destination is dead or a SCRATCH.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  /* Record the (possibly remoded) reload register for later bookkeeping.  */
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If the pseudo being stored was itself stored through an earlier spill
     register, that earlier store may now be deletable.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8106
8107 /* A reload copies values of MODE from register SRC to register DEST.
8108 Return true if it can be treated for inheritance purposes like a
8109 group of reloads, each one reloading a single hard register. The
8110 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8111 occupy the same number of hard registers. */
8112
8113 static bool
8114 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8115 int src ATTRIBUTE_UNUSED,
8116 machine_mode mode ATTRIBUTE_UNUSED)
8117 {
8118 #ifdef CANNOT_CHANGE_MODE_CLASS
8119 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8120 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8121 #else
8122 return true;
8123 #endif
8124 }
8125
8126 /* Output insns to reload values in and out of the chosen reload regs. */
8127
8128 static void
8129 emit_reload_insns (struct insn_chain *chain)
8130 {
8131 rtx_insn *insn = chain->insn;
8132
8133 int j;
8134
8135 CLEAR_HARD_REG_SET (reg_reloaded_died);
8136
8137 for (j = 0; j < reload_n_operands; j++)
8138 input_reload_insns[j] = input_address_reload_insns[j]
8139 = inpaddr_address_reload_insns[j]
8140 = output_reload_insns[j] = output_address_reload_insns[j]
8141 = outaddr_address_reload_insns[j]
8142 = other_output_reload_insns[j] = 0;
8143 other_input_address_reload_insns = 0;
8144 other_input_reload_insns = 0;
8145 operand_reload_insns = 0;
8146 other_operand_reload_insns = 0;
8147
8148 /* Dump reloads into the dump file. */
8149 if (dump_file)
8150 {
8151 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8152 debug_reload_to_stream (dump_file);
8153 }
8154
8155 for (j = 0; j < n_reloads; j++)
8156 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8157 {
8158 unsigned int i;
8159
8160 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8161 new_spill_reg_store[i] = 0;
8162 }
8163
8164 /* Now output the instructions to copy the data into and out of the
8165 reload registers. Do these in the order that the reloads were reported,
8166 since reloads of base and index registers precede reloads of operands
8167 and the operands may need the base and index registers reloaded. */
8168
8169 for (j = 0; j < n_reloads; j++)
8170 {
8171 do_input_reload (chain, rld + j, j);
8172 do_output_reload (chain, rld + j, j);
8173 }
8174
8175 /* Now write all the insns we made for reloads in the order expected by
8176 the allocation functions. Prior to the insn being reloaded, we write
8177 the following reloads:
8178
8179 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8180
8181 RELOAD_OTHER reloads.
8182
8183 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8184 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8185 RELOAD_FOR_INPUT reload for the operand.
8186
8187 RELOAD_FOR_OPADDR_ADDRS reloads.
8188
8189 RELOAD_FOR_OPERAND_ADDRESS reloads.
8190
8191 After the insn being reloaded, we write the following:
8192
8193 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8194 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8195 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8196 reloads for the operand. The RELOAD_OTHER output reloads are
8197 output in descending order by reload number. */
8198
8199 emit_insn_before (other_input_address_reload_insns, insn);
8200 emit_insn_before (other_input_reload_insns, insn);
8201
8202 for (j = 0; j < reload_n_operands; j++)
8203 {
8204 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8205 emit_insn_before (input_address_reload_insns[j], insn);
8206 emit_insn_before (input_reload_insns[j], insn);
8207 }
8208
8209 emit_insn_before (other_operand_reload_insns, insn);
8210 emit_insn_before (operand_reload_insns, insn);
8211
8212 for (j = 0; j < reload_n_operands; j++)
8213 {
8214 rtx_insn *x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8215 x = emit_insn_after (output_address_reload_insns[j], x);
8216 x = emit_insn_after (output_reload_insns[j], x);
8217 emit_insn_after (other_output_reload_insns[j], x);
8218 }
8219
8220 /* For all the spill regs newly reloaded in this instruction,
8221 record what they were reloaded from, so subsequent instructions
8222 can inherit the reloads.
8223
8224 Update spill_reg_store for the reloads of this insn.
8225 Copy the elements that were updated in the loop above. */
8226
8227 for (j = 0; j < n_reloads; j++)
8228 {
8229 int r = reload_order[j];
8230 int i = reload_spill_index[r];
8231
8232 /* If this is a non-inherited input reload from a pseudo, we must
8233 clear any memory of a previous store to the same pseudo. Only do
8234 something if there will not be an output reload for the pseudo
8235 being reloaded. */
8236 if (rld[r].in_reg != 0
8237 && ! (reload_inherited[r] || reload_override_in[r]))
8238 {
8239 rtx reg = rld[r].in_reg;
8240
8241 if (GET_CODE (reg) == SUBREG)
8242 reg = SUBREG_REG (reg);
8243
8244 if (REG_P (reg)
8245 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8246 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8247 {
8248 int nregno = REGNO (reg);
8249
8250 if (reg_last_reload_reg[nregno])
8251 {
8252 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8253
8254 if (reg_reloaded_contents[last_regno] == nregno)
8255 spill_reg_store[last_regno] = 0;
8256 }
8257 }
8258 }
8259
8260 /* I is nonneg if this reload used a register.
8261 If rld[r].reg_rtx is 0, this is an optional reload
8262 that we opted to ignore. */
8263
8264 if (i >= 0 && rld[r].reg_rtx != 0)
8265 {
8266 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8267 int k;
8268
8269 /* For a multi register reload, we need to check if all or part
8270 of the value lives to the end. */
8271 for (k = 0; k < nr; k++)
8272 if (reload_reg_reaches_end_p (i + k, r))
8273 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8274
8275 /* Maybe the spill reg contains a copy of reload_out. */
8276 if (rld[r].out != 0
8277 && (REG_P (rld[r].out)
8278 || (rld[r].out_reg
8279 ? REG_P (rld[r].out_reg)
8280 /* The reload value is an auto-modification of
8281 some kind. For PRE_INC, POST_INC, PRE_DEC
8282 and POST_DEC, we record an equivalence
8283 between the reload register and the operand
8284 on the optimistic assumption that we can make
8285 the equivalence hold. reload_as_needed must
8286 then either make it hold or invalidate the
8287 equivalence.
8288
8289 PRE_MODIFY and POST_MODIFY addresses are reloaded
8290 somewhat differently, and allowing them here leads
8291 to problems. */
8292 : (GET_CODE (rld[r].out) != POST_MODIFY
8293 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8294 {
8295 rtx reg;
8296
8297 reg = reload_reg_rtx_for_output[r];
8298 if (reload_reg_rtx_reaches_end_p (reg, r))
8299 {
8300 machine_mode mode = GET_MODE (reg);
8301 int regno = REGNO (reg);
8302 int nregs = hard_regno_nregs[regno][mode];
8303 rtx out = (REG_P (rld[r].out)
8304 ? rld[r].out
8305 : rld[r].out_reg
8306 ? rld[r].out_reg
8307 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8308 int out_regno = REGNO (out);
8309 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8310 : hard_regno_nregs[out_regno][mode]);
8311 bool piecemeal;
8312
8313 spill_reg_store[regno] = new_spill_reg_store[regno];
8314 spill_reg_stored_to[regno] = out;
8315 reg_last_reload_reg[out_regno] = reg;
8316
8317 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8318 && nregs == out_nregs
8319 && inherit_piecemeal_p (out_regno, regno, mode));
8320
8321 /* If OUT_REGNO is a hard register, it may occupy more than
8322 one register. If it does, say what is in the
8323 rest of the registers assuming that both registers
8324 agree on how many words the object takes. If not,
8325 invalidate the subsequent registers. */
8326
8327 if (HARD_REGISTER_NUM_P (out_regno))
8328 for (k = 1; k < out_nregs; k++)
8329 reg_last_reload_reg[out_regno + k]
8330 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8331
8332 /* Now do the inverse operation. */
8333 for (k = 0; k < nregs; k++)
8334 {
8335 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8336 reg_reloaded_contents[regno + k]
8337 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8338 ? out_regno
8339 : out_regno + k);
8340 reg_reloaded_insn[regno + k] = insn;
8341 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8342 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8343 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8344 regno + k);
8345 else
8346 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8347 regno + k);
8348 }
8349 }
8350 }
8351 /* Maybe the spill reg contains a copy of reload_in. Only do
8352 something if there will not be an output reload for
8353 the register being reloaded. */
8354 else if (rld[r].out_reg == 0
8355 && rld[r].in != 0
8356 && ((REG_P (rld[r].in)
8357 && !HARD_REGISTER_P (rld[r].in)
8358 && !REGNO_REG_SET_P (&reg_has_output_reload,
8359 REGNO (rld[r].in)))
8360 || (REG_P (rld[r].in_reg)
8361 && !REGNO_REG_SET_P (&reg_has_output_reload,
8362 REGNO (rld[r].in_reg))))
8363 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8364 {
8365 rtx reg;
8366
8367 reg = reload_reg_rtx_for_input[r];
8368 if (reload_reg_rtx_reaches_end_p (reg, r))
8369 {
8370 machine_mode mode;
8371 int regno;
8372 int nregs;
8373 int in_regno;
8374 int in_nregs;
8375 rtx in;
8376 bool piecemeal;
8377
8378 mode = GET_MODE (reg);
8379 regno = REGNO (reg);
8380 nregs = hard_regno_nregs[regno][mode];
8381 if (REG_P (rld[r].in)
8382 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8383 in = rld[r].in;
8384 else if (REG_P (rld[r].in_reg))
8385 in = rld[r].in_reg;
8386 else
8387 in = XEXP (rld[r].in_reg, 0);
8388 in_regno = REGNO (in);
8389
8390 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8391 : hard_regno_nregs[in_regno][mode]);
8392
8393 reg_last_reload_reg[in_regno] = reg;
8394
8395 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8396 && nregs == in_nregs
8397 && inherit_piecemeal_p (regno, in_regno, mode));
8398
8399 if (HARD_REGISTER_NUM_P (in_regno))
8400 for (k = 1; k < in_nregs; k++)
8401 reg_last_reload_reg[in_regno + k]
8402 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8403
8404 /* Unless we inherited this reload, show we haven't
8405 recently done a store.
8406 Previous stores of inherited auto_inc expressions
8407 also have to be discarded. */
8408 if (! reload_inherited[r]
8409 || (rld[r].out && ! rld[r].out_reg))
8410 spill_reg_store[regno] = 0;
8411
8412 for (k = 0; k < nregs; k++)
8413 {
8414 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8415 reg_reloaded_contents[regno + k]
8416 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8417 ? in_regno
8418 : in_regno + k);
8419 reg_reloaded_insn[regno + k] = insn;
8420 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8421 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8422 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8423 regno + k);
8424 else
8425 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8426 regno + k);
8427 }
8428 }
8429 }
8430 }
8431
8432 /* The following if-statement was #if 0'd in 1.34 (or before...).
8433 It's reenabled in 1.35 because supposedly nothing else
8434 deals with this problem. */
8435
8436 /* If a register gets output-reloaded from a non-spill register,
8437 that invalidates any previous reloaded copy of it.
8438 But forget_old_reloads_1 won't get to see it, because
8439 it thinks only about the original insn. So invalidate it here.
8440 Also do the same thing for RELOAD_OTHER constraints where the
8441 output is discarded. */
8442 if (i < 0
8443 && ((rld[r].out != 0
8444 && (REG_P (rld[r].out)
8445 || (MEM_P (rld[r].out)
8446 && REG_P (rld[r].out_reg))))
8447 || (rld[r].out == 0 && rld[r].out_reg
8448 && REG_P (rld[r].out_reg))))
8449 {
8450 rtx out = ((rld[r].out && REG_P (rld[r].out))
8451 ? rld[r].out : rld[r].out_reg);
8452 int out_regno = REGNO (out);
8453 machine_mode mode = GET_MODE (out);
8454
8455 /* REG_RTX is now set or clobbered by the main instruction.
8456 As the comment above explains, forget_old_reloads_1 only
8457 sees the original instruction, and there is no guarantee
8458 that the original instruction also clobbered REG_RTX.
8459 For example, if find_reloads sees that the input side of
8460 a matched operand pair dies in this instruction, it may
8461 use the input register as the reload register.
8462
8463 Calling forget_old_reloads_1 is a waste of effort if
8464 REG_RTX is also the output register.
8465
8466 If we know that REG_RTX holds the value of a pseudo
8467 register, the code after the call will record that fact. */
8468 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8469 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8470
8471 if (!HARD_REGISTER_NUM_P (out_regno))
8472 {
8473 rtx src_reg;
8474 rtx_insn *store_insn = NULL;
8475
8476 reg_last_reload_reg[out_regno] = 0;
8477
8478 /* If we can find a hard register that is stored, record
8479 the storing insn so that we may delete this insn with
8480 delete_output_reload. */
8481 src_reg = reload_reg_rtx_for_output[r];
8482
8483 if (src_reg)
8484 {
8485 if (reload_reg_rtx_reaches_end_p (src_reg, r))
8486 store_insn = new_spill_reg_store[REGNO (src_reg)];
8487 else
8488 src_reg = NULL_RTX;
8489 }
8490 else
8491 {
8492 /* If this is an optional reload, try to find the
8493 source reg from an input reload. */
8494 rtx set = single_set (insn);
8495 if (set && SET_DEST (set) == rld[r].out)
8496 {
8497 int k;
8498
8499 src_reg = SET_SRC (set);
8500 store_insn = insn;
8501 for (k = 0; k < n_reloads; k++)
8502 {
8503 if (rld[k].in == src_reg)
8504 {
8505 src_reg = reload_reg_rtx_for_input[k];
8506 break;
8507 }
8508 }
8509 }
8510 }
8511 if (src_reg && REG_P (src_reg)
8512 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8513 {
8514 int src_regno, src_nregs, k;
8515 rtx note;
8516
8517 gcc_assert (GET_MODE (src_reg) == mode);
8518 src_regno = REGNO (src_reg);
8519 src_nregs = hard_regno_nregs[src_regno][mode];
8520 /* The place where to find a death note varies with
8521 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8522 necessarily checked exactly in the code that moves
8523 notes, so just check both locations. */
8524 note = find_regno_note (insn, REG_DEAD, src_regno);
8525 if (! note && store_insn)
8526 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8527 for (k = 0; k < src_nregs; k++)
8528 {
8529 spill_reg_store[src_regno + k] = store_insn;
8530 spill_reg_stored_to[src_regno + k] = out;
8531 reg_reloaded_contents[src_regno + k] = out_regno;
8532 reg_reloaded_insn[src_regno + k] = store_insn;
8533 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8534 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8535 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8536 mode))
8537 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8538 src_regno + k);
8539 else
8540 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8541 src_regno + k);
8542 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8543 if (note)
8544 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8545 else
8546 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8547 }
8548 reg_last_reload_reg[out_regno] = src_reg;
8549 /* We have to set reg_has_output_reload here, or else
8550 forget_old_reloads_1 will clear reg_last_reload_reg
8551 right away. */
8552 SET_REGNO_REG_SET (&reg_has_output_reload,
8553 out_regno);
8554 }
8555 }
8556 else
8557 {
8558 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8559
8560 for (k = 0; k < out_nregs; k++)
8561 reg_last_reload_reg[out_regno + k] = 0;
8562 }
8563 }
8564 }
8565 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8566 }
8567 \f
8568 /* Go through the motions to emit INSN and test if it is strictly valid.
8569 Return the emitted insn if valid, else return NULL. */
8570
8571 static rtx_insn *
8572 emit_insn_if_valid_for_reload (rtx pat)
8573 {
8574 rtx_insn *last = get_last_insn ();
8575 int code;
8576
8577 rtx_insn *insn = emit_insn (pat);
8578 code = recog_memoized (insn);
8579
8580 if (code >= 0)
8581 {
8582 extract_insn (insn);
8583 /* We want constrain operands to treat this insn strictly in its
8584 validity determination, i.e., the way it would after reload has
8585 completed. */
8586 if (constrain_operands (1, get_enabled_alternatives (insn)))
8587 return insn;
8588 }
8589
8590 delete_insns_since (last);
8591 return NULL;
8592 }
8593
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  /* LAST marks the insn chain position before we emit anything, so the
     first insn we emitted can be recovered at the end.  */
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending reload replacements into the addends, so
	 the insn we build refers to the spill registers actually used.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* CODE is the backend's add pattern for this mode (if any); it is
	 consulted below to see whether OP1 matches the add's operand 2
	 predicate.  */
      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Reload the first operand into OUT (recursively; OP0 is simpler
	 than the original PLUS).  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move IN -> LOC -> OUT through the secondary memory location.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      /* Note the historical naming here: SET holds the emitted insn and
	 INSN is first used to hold a pattern, then reused for the emitted
	 insn (hence the as_a cast below).  */
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn = gen_rtx_SET (out, gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
					      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return as_a <rtx_insn *> (insn);
	}

      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8829 \f
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  /* N_OCCURRENCES counts appearances of REG (and its memory equivalents)
     in INSN; N_INHERITED counts how many of those are covered by
     inherited/overridden reloads.  Deletion is only safe when every
     occurrence is accounted for.  */
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#if AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    return;
	}
    }
  /* Count every way REG can appear in INSN: directly in the pattern, in
     a call's function-usage list, via its memory equivalent (after
     eliminating eliminable regs in it), and via any alternate memory
     equivalents recorded for it.  */
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  /* Determine how many hard registers REG spans (one entry for a pseudo).  */
  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      /* Free the pseudo's stack slot, if any.  */
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* REG is still live elsewhere; only delete the output reload insn
	 itself (and any address reloads that fed it).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
9011
9012 /* We are going to delete DEAD_INSN. Recursively delete loads of
9013 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
9014 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
9015 static void
9016 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
9017 {
9018 rtx set = single_set (dead_insn);
9019 rtx set2, dst;
9020 rtx_insn *prev, *next;
9021 if (set)
9022 {
9023 rtx dst = SET_DEST (set);
9024 if (MEM_P (dst))
9025 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9026 }
9027 /* If we deleted the store from a reloaded post_{in,de}c expression,
9028 we can delete the matching adds. */
9029 prev = PREV_INSN (dead_insn);
9030 next = NEXT_INSN (dead_insn);
9031 if (! prev || ! next)
9032 return;
9033 set = single_set (next);
9034 set2 = single_set (prev);
9035 if (! set || ! set2
9036 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9037 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9038 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9039 return;
9040 dst = SET_DEST (set);
9041 if (! rtx_equal_p (dst, SET_DEST (set2))
9042 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9043 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9044 || (INTVAL (XEXP (SET_SRC (set), 1))
9045 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9046 return;
9047 delete_related_insns (prev);
9048 delete_related_insns (next);
9049 }
9050
/* Subfunction of delete_address_reloads: process registers found in X.
   DEAD_INSN is the insn being deleted; CURRENT_INSN is the insn being
   reloaded.  For each spill register mentioned in X whose defining load
   is no longer needed, delete that load (recursing into its source).  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* Non-REG rtx: recurse into every sub-expression looking for REGs.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill registers are candidates for this cleanup.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means the setting insn is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Only consider insns that reload itself generated (UID at or after
     reload_first_uid).  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV is a dead load of the reload register: recursively clean up the
     address it loads from, forget the register's recorded contents, and
     delete the load itself.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9159 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  /* The increment expression: either pulled out of a {PRE,POST}_MODIFY or
     built from INC_AMOUNT below.  */
  rtx inc;
  rtx_insn *add_insn;
  int code;
  /* Cheapest source of the original value: INCLOC itself when IN is the
     auto-inc expression, otherwise IN.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* {PRE,POST}_MODIFY carry the addend explicitly as (plus loc inc);
	 extract it rather than using INC_AMOUNT.  */
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  /* Constrain strictly, as reload will not revisit this insn.  */
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* The direct add was not recognized; remove it and fall through to
	 the conservative sequence below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9268 \f
9269 #if AUTO_INC_DEC
9270 static void
9271 add_auto_inc_notes (rtx_insn *insn, rtx x)
9272 {
9273 enum rtx_code code = GET_CODE (x);
9274 const char *fmt;
9275 int i, j;
9276
9277 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9278 {
9279 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
9280 return;
9281 }
9282
9283 /* Scan all the operand sub-expressions. */
9284 fmt = GET_RTX_FORMAT (code);
9285 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9286 {
9287 if (fmt[i] == 'e')
9288 add_auto_inc_notes (insn, XEXP (x, i));
9289 else if (fmt[i] == 'E')
9290 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9291 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9292 }
9293 }
9294 #endif