]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/reload1.c
2014-11-01 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl-error.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "ggc.h"
32 #include "flags.h"
33 #include "hashtab.h"
34 #include "hash-set.h"
35 #include "vec.h"
36 #include "input.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "insn-codes.h"
40 #include "optabs.h"
41 #include "regs.h"
42 #include "addresses.h"
43 #include "predict.h"
44 #include "dominance.h"
45 #include "cfg.h"
46 #include "cfgrtl.h"
47 #include "cfgbuild.h"
48 #include "basic-block.h"
49 #include "df.h"
50 #include "reload.h"
51 #include "recog.h"
52 #include "except.h"
53 #include "tree.h"
54 #include "ira.h"
55 #include "target.h"
56 #include "emit-rtl.h"
57 #include "dumpfile.h"
58 #include "rtl-iter.h"
59
60 /* This file contains the reload pass of the compiler, which is
61 run after register allocation has been done. It checks that
62 each insn is valid (operands required to be in registers really
63 are in registers of the proper class) and fixes up invalid ones
64 by copying values temporarily into registers for the insns
65 that need them.
66
67 The results of register allocation are described by the vector
68 reg_renumber; the insns still contain pseudo regs, but reg_renumber
69 can be used to find which hard reg, if any, a pseudo reg is in.
70
71 The technique we always use is to free up a few hard regs that are
72 called ``reload regs'', and for each place where a pseudo reg
73 must be in a hard reg, copy it temporarily into one of the reload regs.
74
75 Reload regs are allocated locally for every instruction that needs
76 reloads. When there are pseudos which are allocated to a register that
77 has been chosen as a reload reg, such pseudos must be ``spilled''.
78 This means that they go to other hard regs, or to stack slots if no other
79 available hard regs can be found. Spilling can invalidate more
80 insns, requiring additional need for reloads, so we must keep checking
81 until the process stabilizes.
82
83 For machines with different classes of registers, we must keep track
84 of the register class needed for each reload, and make sure that
85 we allocate enough reload registers of each class.
86
87 The file reload.c contains the code that checks one insn for
88 validity and reports the reloads that it needs. This file
89 is in charge of scanning the entire rtl code, accumulating the
90 reload needs, spilling, assigning reload registers to use for
91 fixing up each insn, and generating the new insns to copy values
92 into the reload registers. */
93 \f
94 struct target_reload default_target_reload;
95 #if SWITCHABLE_TARGET
96 struct target_reload *this_target_reload = &default_target_reload;
97 #endif
98
99 #define spill_indirect_levels \
100 (this_target_reload->x_spill_indirect_levels)
101
102 /* During reload_as_needed, element N contains a REG rtx for the hard reg
103 into which reg N has been reloaded (perhaps for a previous insn). */
104 static rtx *reg_last_reload_reg;
105
106 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
107 for an output reload that stores into reg N. */
108 static regset_head reg_has_output_reload;
109
110 /* Indicates which hard regs are reload-registers for an output reload
111 in the current insn. */
112 static HARD_REG_SET reg_is_output_reload;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static unsigned int *reg_max_ref_width;
116
117 /* Vector to remember old contents of reg_renumber before spilling. */
118 static short *reg_old_renumber;
119
120 /* During reload_as_needed, element N contains the last pseudo regno reloaded
121 into hard register N. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 hard register N was last used. Its contents are significant only
128 when reg_reloaded_valid is set for this register. */
129 static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
130
131 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
132 static HARD_REG_SET reg_reloaded_valid;
133 /* Indicate if the register was dead at the end of the reload.
134 This is only valid if reg_reloaded_contents is set and valid. */
135 static HARD_REG_SET reg_reloaded_dead;
136
137 /* Indicate whether the register's current value is one that is not
138 safe to retain across a call, even for registers that are normally
139 call-saved. This is only meaningful for members of reg_reloaded_valid. */
140 static HARD_REG_SET reg_reloaded_call_part_clobbered;
141
142 /* Number of spill-regs so far; number of valid elements of spill_regs. */
143 static int n_spills;
144
145 /* In parallel with spill_regs, contains REG rtx's for those regs.
146 Holds the last rtx used for any given reg, or 0 if it has never
147 been used for spilling yet. This rtx is reused, provided it has
148 the proper mode. */
149 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
150
151 /* In parallel with spill_regs, contains nonzero for a spill reg
152 that was stored after the last time it was used.
153 The precise value is the insn generated to do the store. */
154 static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];
155
156 /* This is the register that was stored with spill_reg_store. This is a
157 copy of reload_out / reload_out_reg when the value was stored; if
158 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
159 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
160
161 /* This table is the inverse mapping of spill_regs:
162 indexed by hard reg number,
163 it contains the position of that reg in spill_regs,
164 or -1 for something that is not in spill_regs.
165
166 ?!? This is no longer accurate. */
167 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
168
169 /* This reg set indicates registers that can't be used as spill registers for
170 the currently processed insn. These are the hard registers which are live
171 during the insn, but not allocated to pseudos, as well as fixed
172 registers. */
173 static HARD_REG_SET bad_spill_regs;
174
175 /* These are the hard registers that can't be used as spill register for any
176 insn. This includes registers used for user variables and registers that
177 we can't eliminate. A register that appears in this set also can't be used
178 to retry register allocation. */
179 static HARD_REG_SET bad_spill_regs_global;
180
181 /* Describes order of use of registers for reloading
182 of spilled pseudo-registers. `n_spills' is the number of
183 elements that are actually valid; new ones are added at the end.
184
185 Both spill_regs and spill_reg_order are used on two occasions:
186 once during find_reload_regs, where they keep track of the spill registers
187 for a single insn, but also during reload_as_needed where they show all
188 the registers ever used by reload. For the latter case, the information
189 is calculated during finish_spills. */
190 static short spill_regs[FIRST_PSEUDO_REGISTER];
191
192 /* This vector of reg sets indicates, for each pseudo, which hard registers
193 may not be used for retrying global allocation because the register was
194 formerly spilled from one of them. If we allowed reallocating a pseudo to
195 a register that it was already allocated to, reload might not
196 terminate. */
197 static HARD_REG_SET *pseudo_previous_regs;
198
199 /* This vector of reg sets indicates, for each pseudo, which hard
200 registers may not be used for retrying global allocation because they
201 are used as spill registers during one of the insns in which the
202 pseudo is live. */
203 static HARD_REG_SET *pseudo_forbidden_regs;
204
205 /* All hard regs that have been used as spill registers for any insn are
206 marked in this set. */
207 static HARD_REG_SET used_spill_regs;
208
209 /* Index of last register assigned as a spill register. We allocate in
210 a round-robin fashion. */
211 static int last_spill_reg;
212
213 /* Record the stack slot for each spilled hard register. */
214 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
215
216 /* Width allocated so far for that stack slot. */
217 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
218
219 /* Record which pseudos needed to be spilled. */
220 static regset_head spilled_pseudos;
221
222 /* Record which pseudos changed their allocation in finish_spills. */
223 static regset_head changed_allocation_pseudos;
224
225 /* Used for communication between order_regs_for_reload and count_pseudo.
226 Used to avoid counting one pseudo twice. */
227 static regset_head pseudos_counted;
228
229 /* First uid used by insns created by reload in this function.
230 Used in find_equiv_reg. */
231 int reload_first_uid;
232
233 /* Flag set by local-alloc or global-alloc if anything is live in
234 a call-clobbered reg across calls. */
235 int caller_save_needed;
236
237 /* Set to 1 while reload_as_needed is operating.
238 Required by some machines to handle any generated moves differently. */
239 int reload_in_progress = 0;
240
241 /* This obstack is used for allocation of rtl during register elimination.
242 The allocated storage can be freed once find_reloads has processed the
243 insn. */
244 static struct obstack reload_obstack;
245
246 /* Points to the beginning of the reload_obstack. All insn_chain structures
247 are allocated first. */
248 static char *reload_startobj;
249
250 /* The point after all insn_chain structures. Used to quickly deallocate
251 memory allocated in copy_reloads during calculate_needs_all_insns. */
252 static char *reload_firstobj;
253
254 /* This points before all local rtl generated by register elimination.
255 Used to quickly free all memory after processing one insn. */
256 static char *reload_insn_firstobj;
257
258 /* List of insn_chain instructions, one for every insn that reload needs to
259 examine. */
260 struct insn_chain *reload_insn_chain;
261
262 /* TRUE if we potentially left dead insns in the insn stream and want to
263 run DCE immediately after reload, FALSE otherwise. */
264 static bool need_dce;
265
266 /* List of all insns needing reloads. */
267 static struct insn_chain *insns_need_reload;
268 \f
269 /* This structure is used to record information about register eliminations.
270 Each array entry describes one possible way of eliminating a register
271 in favor of another. If there is more than one way of eliminating a
272 particular register, the most preferred should be specified first. */
273
274 struct elim_table
275 {
276 int from; /* Register number to be eliminated. */
277 int to; /* Register number used as replacement. */
278 HOST_WIDE_INT initial_offset; /* Initial difference between values. */
279 int can_eliminate; /* Nonzero if this elimination can be done. */
280 int can_eliminate_previous; /* Value returned by TARGET_CAN_ELIMINATE
281 target hook in previous scan over insns
282 made by reload. */
283 HOST_WIDE_INT offset; /* Current offset between the two regs. */
284 HOST_WIDE_INT previous_offset;/* Offset at end of previous insn. */
285 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
286 rtx from_rtx; /* REG rtx for the register to be eliminated.
287 We cannot simply compare the number since
288 we might then spuriously replace a hard
289 register corresponding to a pseudo
290 assigned to the reg to be eliminated. */
291 rtx to_rtx; /* REG rtx for the replacement. */
292 };
293
294 static struct elim_table *reg_eliminate = 0;
295
296 /* This is an intermediate structure to initialize the table. It has
297 exactly the members provided by ELIMINABLE_REGS. */
298 static const struct elim_table_1
299 {
300 const int from;
301 const int to;
302 } reg_eliminate_1[] =
303
304 /* If a set of eliminable registers was specified, define the table from it.
305 Otherwise, default to the normal case of the frame pointer being
306 replaced by the stack pointer. */
307
308 #ifdef ELIMINABLE_REGS
309 ELIMINABLE_REGS;
310 #else
311 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
312 #endif
313
314 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
315
316 /* Record the number of pending eliminations that have an offset not equal
317 to their initial offset. If nonzero, we use a new copy of each
318 replacement result in any insns encountered. */
319 int num_not_at_initial_offset;
320
321 /* Count the number of registers that we may be able to eliminate. */
322 static int num_eliminable;
323 /* And the number of registers that are equivalent to a constant that
324 can be eliminated to frame_pointer / arg_pointer + constant. */
325 static int num_eliminable_invariants;
326
327 /* For each label, we record the offset of each elimination. If we reach
328 a label by more than one path and an offset differs, we cannot do the
329 elimination. This information is indexed by the difference of the
330 number of the label and the first label number. We can't offset the
331 pointer itself as this can cause problems on machines with segmented
332 memory. The first table is an array of flags that records whether we
333 have yet encountered a label and the second table is an array of arrays,
334 one entry in the latter array for each elimination. */
335
336 static int first_label_num;
337 static char *offsets_known_at;
338 static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
339
340 vec<reg_equivs_t, va_gc> *reg_equivs;
341
342 /* Stack of addresses where an rtx has been changed. We can undo the
343 changes by popping items off the stack and restoring the original
344 value at each location.
345
346 We use this simplistic undo capability rather than copy_rtx as copy_rtx
347 will not make a deep copy of a normally sharable rtx, such as
348 (const (plus (symbol_ref) (const_int))). If such an expression appears
349 as R1 in gen_reload_chain_without_interm_reg_p, then a shared
350 rtx expression would be changed. See PR 42431. */
351
352 typedef rtx *rtx_p;
353 static vec<rtx_p> substitute_stack;
354
355 /* Number of labels in the current function. */
356
357 static int num_labels;
358 \f
359 static void replace_pseudos_in (rtx *, machine_mode, rtx);
360 static void maybe_fix_stack_asms (void);
361 static void copy_reloads (struct insn_chain *);
362 static void calculate_needs_all_insns (int);
363 static int find_reg (struct insn_chain *, int);
364 static void find_reload_regs (struct insn_chain *);
365 static void select_reload_regs (void);
366 static void delete_caller_save_insns (void);
367
368 static void spill_failure (rtx_insn *, enum reg_class);
369 static void count_spilled_pseudo (int, int, int);
370 static void delete_dead_insn (rtx_insn *);
371 static void alter_reg (int, int, bool);
372 static void set_label_offsets (rtx, rtx_insn *, int);
373 static void check_eliminable_occurrences (rtx);
374 static void elimination_effects (rtx, machine_mode);
375 static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
376 static int eliminate_regs_in_insn (rtx_insn *, int);
377 static void update_eliminable_offsets (void);
378 static void mark_not_eliminable (rtx, const_rtx, void *);
379 static void set_initial_elim_offsets (void);
380 static bool verify_initial_elim_offsets (void);
381 static void set_initial_label_offsets (void);
382 static void set_offsets_for_label (rtx_insn *);
383 static void init_eliminable_invariants (rtx_insn *, bool);
384 static void init_elim_table (void);
385 static void free_reg_equiv (void);
386 static void update_eliminables (HARD_REG_SET *);
387 static bool update_eliminables_and_spill (void);
388 static void elimination_costs_in_insn (rtx_insn *);
389 static void spill_hard_reg (unsigned int, int);
390 static int finish_spills (int);
391 static void scan_paradoxical_subregs (rtx);
392 static void count_pseudo (int);
393 static void order_regs_for_reload (struct insn_chain *);
394 static void reload_as_needed (int);
395 static void forget_old_reloads_1 (rtx, const_rtx, void *);
396 static void forget_marked_reloads (regset);
397 static int reload_reg_class_lower (const void *, const void *);
398 static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
399 machine_mode);
400 static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
401 machine_mode);
402 static int reload_reg_free_p (unsigned int, int, enum reload_type);
403 static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
404 rtx, rtx, int, int);
405 static int free_for_value_p (int, machine_mode, int, enum reload_type,
406 rtx, rtx, int, int);
407 static int allocate_reload_reg (struct insn_chain *, int, int);
408 static int conflicts_with_override (rtx);
409 static void failed_reload (rtx_insn *, int);
410 static int set_reload_reg (int, int);
411 static void choose_reload_regs_init (struct insn_chain *, rtx *);
412 static void choose_reload_regs (struct insn_chain *);
413 static void emit_input_reload_insns (struct insn_chain *, struct reload *,
414 rtx, int);
415 static void emit_output_reload_insns (struct insn_chain *, struct reload *,
416 int);
417 static void do_input_reload (struct insn_chain *, struct reload *, int);
418 static void do_output_reload (struct insn_chain *, struct reload *, int);
419 static void emit_reload_insns (struct insn_chain *);
420 static void delete_output_reload (rtx_insn *, int, int, rtx);
421 static void delete_address_reloads (rtx_insn *, rtx_insn *);
422 static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
423 static void inc_for_reload (rtx, rtx, rtx, int);
424 #ifdef AUTO_INC_DEC
425 static void add_auto_inc_notes (rtx_insn *, rtx);
426 #endif
427 static void substitute (rtx *, const_rtx, rtx);
428 static bool gen_reload_chain_without_interm_reg_p (int, int);
429 static int reloads_conflict (int, int);
430 static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
431 static rtx_insn *emit_insn_if_valid_for_reload (rtx);
432 \f
433 /* Initialize the reload pass. This is called at the beginning of compilation
434 and may be called again if the target is reinitialized. */
435
436 void
437 init_reload (void)
438 {
439 int i;
440
441 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
442 Set spill_indirect_levels to the number of levels such addressing is
443 permitted, zero if it is not permitted at all. */
444
445 rtx tem
446 = gen_rtx_MEM (Pmode,
447 gen_rtx_PLUS (Pmode,
448 gen_rtx_REG (Pmode,
449 LAST_VIRTUAL_REGISTER + 1),
450 gen_int_mode (4, Pmode)));
451 spill_indirect_levels = 0;
452
453 while (memory_address_p (QImode, tem))
454 {
455 spill_indirect_levels++;
456 tem = gen_rtx_MEM (Pmode, tem);
457 }
458
459 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
460
461 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
462 indirect_symref_ok = memory_address_p (QImode, tem);
463
464 /* See if reg+reg is a valid (and offsettable) address. */
465
466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
467 {
468 tem = gen_rtx_PLUS (Pmode,
469 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
470 gen_rtx_REG (Pmode, i));
471
472 /* This way, we make sure that reg+reg is an offsettable address. */
473 tem = plus_constant (Pmode, tem, 4);
474
475 if (memory_address_p (QImode, tem))
476 {
477 double_reg_address_ok = 1;
478 break;
479 }
480 }
481
482 /* Initialize obstack for our rtl allocation. */
483 if (reload_startobj == NULL)
484 {
485 gcc_obstack_init (&reload_obstack);
486 reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
487 }
488
489 INIT_REG_SET (&spilled_pseudos);
490 INIT_REG_SET (&changed_allocation_pseudos);
491 INIT_REG_SET (&pseudos_counted);
492 }
493
494 /* List of insn chains that are currently unused. */
495 static struct insn_chain *unused_insn_chains = 0;
496
497 /* Allocate an empty insn_chain structure. */
498 struct insn_chain *
499 new_insn_chain (void)
500 {
501 struct insn_chain *c;
502
503 if (unused_insn_chains == 0)
504 {
505 c = XOBNEW (&reload_obstack, struct insn_chain);
506 INIT_REG_SET (&c->live_throughout);
507 INIT_REG_SET (&c->dead_or_set);
508 }
509 else
510 {
511 c = unused_insn_chains;
512 unused_insn_chains = c->next;
513 }
514 c->is_caller_save_insn = 0;
515 c->need_operand_change = 0;
516 c->need_reload = 0;
517 c->need_elim = 0;
518 return c;
519 }
520
521 /* Small utility function to set all regs in hard reg set TO which are
522 allocated to pseudos in regset FROM. */
523
524 void
525 compute_use_by_pseudos (HARD_REG_SET *to, regset from)
526 {
527 unsigned int regno;
528 reg_set_iterator rsi;
529
530 EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
531 {
532 int r = reg_renumber[regno];
533
534 if (r < 0)
535 {
536 /* reload_combine uses the information from DF_LIVE_IN,
537 which might still contain registers that have not
538 actually been allocated since they have an
539 equivalence. */
540 gcc_assert (ira_conflicts_p || reload_completed);
541 }
542 else
543 add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
544 }
545 }
546
/* Replace all pseudos found in LOC with their corresponding
   equivalences.

   MEM_MODE is the mode of an enclosing MEM, if any, and is passed to
   eliminate_regs_1 so elimination offsets can be applied correctly.
   USAGE is forwarded to eliminate_regs_1 as well.  Rewrites *LOC in
   place; recurses into all operands.  */

static void
replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      /* Hard registers have no equivalences; leave them alone.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      /* First try register elimination; if it changed the rtx, rescan
	 the result, since elimination may expose further pseudos.  */
      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      /* Substitute the pseudo's equivalence, trying the equivalence
	 kinds in priority order: constant, invariant, memory, address.  */
      if (reg_equiv_constant (regno))
	*loc = reg_equiv_constant (regno);
      else if (reg_equiv_invariant (regno))
	*loc = reg_equiv_invariant (regno);
      else if (reg_equiv_mem (regno))
	*loc = reg_equiv_mem (regno);
      else if (reg_equiv_address (regno))
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
      else
	{
	  /* No equivalence: the pseudo must have been remapped to a
	     different rtx in regno_reg_rtx (e.g. a hard register).  */
	  gcc_assert (!REG_P (regno_reg_rtx[regno])
		      || REGNO (regno_reg_rtx[regno]) != regno);
	  *loc = regno_reg_rtx[regno];
	}

      return;
    }
  else if (code == MEM)
    {
      /* Recurse into the address, recording this MEM's mode.  */
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
609
610 /* Determine if the current function has an exception receiver block
611 that reaches the exit block via non-exceptional edges */
612
613 static bool
614 has_nonexceptional_receiver (void)
615 {
616 edge e;
617 edge_iterator ei;
618 basic_block *tos, *worklist, bb;
619
620 /* If we're not optimizing, then just err on the safe side. */
621 if (!optimize)
622 return true;
623
624 /* First determine which blocks can reach exit via normal paths. */
625 tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
626
627 FOR_EACH_BB_FN (bb, cfun)
628 bb->flags &= ~BB_REACHABLE;
629
630 /* Place the exit block on our worklist. */
631 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
632 *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
633
634 /* Iterate: find everything reachable from what we've already seen. */
635 while (tos != worklist)
636 {
637 bb = *--tos;
638
639 FOR_EACH_EDGE (e, ei, bb->preds)
640 if (!(e->flags & EDGE_ABNORMAL))
641 {
642 basic_block src = e->src;
643
644 if (!(src->flags & BB_REACHABLE))
645 {
646 src->flags |= BB_REACHABLE;
647 *tos++ = src;
648 }
649 }
650 }
651 free (worklist);
652
653 /* Now see if there's a reachable block with an exceptional incoming
654 edge. */
655 FOR_EACH_BB_FN (bb, cfun)
656 if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
657 return true;
658
659 /* No exceptional block reached exit unexceptionally. */
660 return false;
661 }
662
663 /* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
664 zero elements) to MAX_REG_NUM elements.
665
666 Initialize all new fields to NULL and update REG_EQUIVS_SIZE. */
667 void
668 grow_reg_equivs (void)
669 {
670 int old_size = vec_safe_length (reg_equivs);
671 int max_regno = max_reg_num ();
672 int i;
673 reg_equivs_t ze;
674
675 memset (&ze, 0, sizeof (reg_equivs_t));
676 vec_safe_reserve (reg_equivs, max_regno);
677 for (i = old_size; i < max_regno; i++)
678 reg_equivs->quick_insert (i, ze);
679 }
680
681 \f
682 /* Global variables used by reload and its subroutines. */
683
684 /* The current basic block while in calculate_elim_costs_all_insns. */
685 static basic_block elim_bb;
686
687 /* Set during calculate_needs if an insn needs register elimination. */
688 static int something_needs_elimination;
689 /* Set during calculate_needs if an insn needs an operand changed. */
690 static int something_needs_operands_changed;
691 /* Set by alter_regs if we spilled a register to the stack. */
692 static bool something_was_spilled;
693
694 /* Nonzero means we couldn't get enough spill regs. */
695 static int failure;
696
697 /* Temporary array of pseudo-register number. */
698 static int *temp_pseudo_reg_arr;
699
700 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
701 If that insn didn't set the register (i.e., it copied the register to
702 memory), just delete that insn instead of the equivalencing insn plus
703 anything now dead. If we call delete_dead_insn on that insn, we may
704 delete the insn that actually sets the register if the register dies
705 there and that is incorrect. */
706 static void
707 remove_init_insns ()
708 {
709 for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
710 {
711 if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
712 {
713 rtx list;
714 for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
715 {
716 rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));
717
718 /* If we already deleted the insn or if it may trap, we can't
719 delete it. The latter case shouldn't happen, but can
720 if an insn has a variable address, gets a REG_EH_REGION
721 note added to it, and then gets converted into a load
722 from a constant address. */
723 if (NOTE_P (equiv_insn)
724 || can_throw_internal (equiv_insn))
725 ;
726 else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
727 delete_dead_insn (equiv_insn);
728 else
729 SET_INSN_DELETED (equiv_insn);
730 }
731 }
732 }
733 }
734
735 /* Return true if remove_init_insns will delete INSN. */
736 static bool
737 will_delete_init_insn_p (rtx_insn *insn)
738 {
739 rtx set = single_set (insn);
740 if (!set || !REG_P (SET_DEST (set)))
741 return false;
742 unsigned regno = REGNO (SET_DEST (set));
743
744 if (can_throw_internal (insn))
745 return false;
746
747 if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
748 return false;
749
750 for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
751 {
752 rtx equiv_insn = XEXP (list, 0);
753 if (equiv_insn == insn)
754 return true;
755 }
756 return false;
757 }
758
759 /* Main entry point for the reload pass.
760
761 FIRST is the first insn of the function being compiled.
762
763 GLOBAL nonzero means we were called from global_alloc
764 and should attempt to reallocate any pseudoregs that we
765 displace from hard regs we will use for reloads.
766 If GLOBAL is zero, we do not have enough information to do that,
767 so any pseudo reg that is spilled must go to the stack.
768
769 Return value is TRUE if reload likely left dead insns in the
770 stream and a DCE pass should be run to elimiante them. Else the
771 return value is FALSE. */
772
773 bool
774 reload (rtx_insn *first, int global)
775 {
776 int i, n;
777 rtx_insn *insn;
778 struct elim_table *ep;
779 basic_block bb;
780 bool inserted;
781
782 /* Make sure even insns with volatile mem refs are recognizable. */
783 init_recog ();
784
785 failure = 0;
786
787 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
788
789 /* Make sure that the last insn in the chain
790 is not something that needs reloading. */
791 emit_note (NOTE_INSN_DELETED);
792
793 /* Enable find_equiv_reg to distinguish insns made by reload. */
794 reload_first_uid = get_max_uid ();
795
796 #ifdef SECONDARY_MEMORY_NEEDED
797 /* Initialize the secondary memory table. */
798 clear_secondary_mem ();
799 #endif
800
801 /* We don't have a stack slot for any spill reg yet. */
802 memset (spill_stack_slot, 0, sizeof spill_stack_slot);
803 memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
804
805 /* Initialize the save area information for caller-save, in case some
806 are needed. */
807 init_save_areas ();
808
809 /* Compute which hard registers are now in use
810 as homes for pseudo registers.
811 This is done here rather than (eg) in global_alloc
812 because this point is reached even if not optimizing. */
813 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
814 mark_home_live (i);
815
816 /* A function that has a nonlocal label that can reach the exit
817 block via non-exceptional paths must save all call-saved
818 registers. */
819 if (cfun->has_nonlocal_label
820 && has_nonexceptional_receiver ())
821 crtl->saves_all_registers = 1;
822
823 if (crtl->saves_all_registers)
824 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
825 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
826 df_set_regs_ever_live (i, true);
827
828 /* Find all the pseudo registers that didn't get hard regs
829 but do have known equivalent constants or memory slots.
830 These include parameters (known equivalent to parameter slots)
831 and cse'd or loop-moved constant memory addresses.
832
833 Record constant equivalents in reg_equiv_constant
834 so they will be substituted by find_reloads.
835 Record memory equivalents in reg_mem_equiv so they can
836 be substituted eventually by altering the REG-rtx's. */
837
838 grow_reg_equivs ();
839 reg_old_renumber = XCNEWVEC (short, max_regno);
840 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
841 pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
842 pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);
843
844 CLEAR_HARD_REG_SET (bad_spill_regs_global);
845
846 init_eliminable_invariants (first, true);
847 init_elim_table ();
848
849 /* Alter each pseudo-reg rtx to contain its hard reg number. Assign
850 stack slots to the pseudos that lack hard regs or equivalents.
851 Do not touch virtual registers. */
852
853 temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
854 for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
855 temp_pseudo_reg_arr[n++] = i;
856
857 if (ira_conflicts_p)
858 /* Ask IRA to order pseudo-registers for better stack slot
859 sharing. */
860 ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_width);
861
862 for (i = 0; i < n; i++)
863 alter_reg (temp_pseudo_reg_arr[i], -1, false);
864
865 /* If we have some registers we think can be eliminated, scan all insns to
866 see if there is an insn that sets one of these registers to something
867 other than itself plus a constant. If so, the register cannot be
868 eliminated. Doing this scan here eliminates an extra pass through the
869 main reload loop in the most common case where register elimination
870 cannot be done. */
871 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
872 if (INSN_P (insn))
873 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
874
875 maybe_fix_stack_asms ();
876
877 insns_need_reload = 0;
878 something_needs_elimination = 0;
879
880 /* Initialize to -1, which means take the first spill register. */
881 last_spill_reg = -1;
882
883 /* Spill any hard regs that we know we can't eliminate. */
884 CLEAR_HARD_REG_SET (used_spill_regs);
885 /* There can be multiple ways to eliminate a register;
886 they should be listed adjacently.
887 Elimination for any register fails only if all possible ways fail. */
888 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
889 {
890 int from = ep->from;
891 int can_eliminate = 0;
892 do
893 {
894 can_eliminate |= ep->can_eliminate;
895 ep++;
896 }
897 while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
898 if (! can_eliminate)
899 spill_hard_reg (from, 1);
900 }
901
902 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
903 if (frame_pointer_needed)
904 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
905 #endif
906 finish_spills (global);
907
908 /* From now on, we may need to generate moves differently. We may also
909 allow modifications of insns which cause them to not be recognized.
910 Any such modifications will be cleaned up during reload itself. */
911 reload_in_progress = 1;
912
913 /* This loop scans the entire function each go-round
914 and repeats until one repetition spills no additional hard regs. */
915 for (;;)
916 {
917 int something_changed;
918 int did_spill;
919 HOST_WIDE_INT starting_frame_size;
920
921 starting_frame_size = get_frame_size ();
922 something_was_spilled = false;
923
924 set_initial_elim_offsets ();
925 set_initial_label_offsets ();
926
927 /* For each pseudo register that has an equivalent location defined,
928 try to eliminate any eliminable registers (such as the frame pointer)
929 assuming initial offsets for the replacement register, which
930 is the normal case.
931
932 If the resulting location is directly addressable, substitute
933 the MEM we just got directly for the old REG.
934
935 If it is not addressable but is a constant or the sum of a hard reg
936 and constant, it is probably not addressable because the constant is
937 out of range, in that case record the address; we will generate
938 hairy code to compute the address in a register each time it is
939 needed. Similarly if it is a hard register, but one that is not
940 valid as an address register.
941
942 If the location is not addressable, but does not have one of the
943 above forms, assign a stack slot. We have to do this to avoid the
944 potential of producing lots of reloads if, e.g., a location involves
945 a pseudo that didn't get a hard register and has an equivalent memory
946 location that also involves a pseudo that didn't get a hard register.
947
948 Perhaps at some point we will improve reload_when_needed handling
949 so this problem goes away. But that's very hairy. */
950
951 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
952 if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
953 {
954 rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
955 NULL_RTX);
956
957 if (strict_memory_address_addr_space_p
958 (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
959 MEM_ADDR_SPACE (x)))
960 reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
961 else if (CONSTANT_P (XEXP (x, 0))
962 || (REG_P (XEXP (x, 0))
963 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
964 || (GET_CODE (XEXP (x, 0)) == PLUS
965 && REG_P (XEXP (XEXP (x, 0), 0))
966 && (REGNO (XEXP (XEXP (x, 0), 0))
967 < FIRST_PSEUDO_REGISTER)
968 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
969 reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
970 else
971 {
972 /* Make a new stack slot. Then indicate that something
973 changed so we go back and recompute offsets for
974 eliminable registers because the allocation of memory
975 below might change some offset. reg_equiv_{mem,address}
976 will be set up for this pseudo on the next pass around
977 the loop. */
978 reg_equiv_memory_loc (i) = 0;
979 reg_equiv_init (i) = 0;
980 alter_reg (i, -1, true);
981 }
982 }
983
984 if (caller_save_needed)
985 setup_save_areas ();
986
987 if (starting_frame_size && crtl->stack_alignment_needed)
988 {
989 /* If we have a stack frame, we must align it now. The
990 stack size may be a part of the offset computation for
991 register elimination. So if this changes the stack size,
992 then repeat the elimination bookkeeping. We don't
993 realign when there is no stack, as that will cause a
994 stack frame when none is needed should
995 STARTING_FRAME_OFFSET not be already aligned to
996 STACK_BOUNDARY. */
997 assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
998 }
999 /* If we allocated another stack slot, redo elimination bookkeeping. */
1000 if (something_was_spilled || starting_frame_size != get_frame_size ())
1001 {
1002 update_eliminables_and_spill ();
1003 continue;
1004 }
1005
1006 if (caller_save_needed)
1007 {
1008 save_call_clobbered_regs ();
1009 /* That might have allocated new insn_chain structures. */
1010 reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1011 }
1012
1013 calculate_needs_all_insns (global);
1014
1015 if (! ira_conflicts_p)
1016 /* Don't do it for IRA. We need this info because we don't
1017 change live_throughout and dead_or_set for chains when IRA
1018 is used. */
1019 CLEAR_REG_SET (&spilled_pseudos);
1020
1021 did_spill = 0;
1022
1023 something_changed = 0;
1024
1025 /* If we allocated any new memory locations, make another pass
1026 since it might have changed elimination offsets. */
1027 if (something_was_spilled || starting_frame_size != get_frame_size ())
1028 something_changed = 1;
1029
1030 /* Even if the frame size remained the same, we might still have
1031 changed elimination offsets, e.g. if find_reloads called
1032 force_const_mem requiring the back end to allocate a constant
1033 pool base register that needs to be saved on the stack. */
1034 else if (!verify_initial_elim_offsets ())
1035 something_changed = 1;
1036
1037 if (update_eliminables_and_spill ())
1038 {
1039 did_spill = 1;
1040 something_changed = 1;
1041 }
1042
1043 select_reload_regs ();
1044 if (failure)
1045 goto failed;
1046
1047 if (insns_need_reload != 0 || did_spill)
1048 something_changed |= finish_spills (global);
1049
1050 if (! something_changed)
1051 break;
1052
1053 if (caller_save_needed)
1054 delete_caller_save_insns ();
1055
1056 obstack_free (&reload_obstack, reload_firstobj);
1057 }
1058
1059 /* If global-alloc was run, notify it of any register eliminations we have
1060 done. */
1061 if (global)
1062 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1063 if (ep->can_eliminate)
1064 mark_elimination (ep->from, ep->to);
1065
1066 remove_init_insns ();
1067
1068 /* Use the reload registers where necessary
1069 by generating move instructions to move the must-be-register
1070 values into or out of the reload registers. */
1071
1072 if (insns_need_reload != 0 || something_needs_elimination
1073 || something_needs_operands_changed)
1074 {
1075 HOST_WIDE_INT old_frame_size = get_frame_size ();
1076
1077 reload_as_needed (global);
1078
1079 gcc_assert (old_frame_size == get_frame_size ());
1080
1081 gcc_assert (verify_initial_elim_offsets ());
1082 }
1083
1084 /* If we were able to eliminate the frame pointer, show that it is no
1085 longer live at the start of any basic block. If it ls live by
1086 virtue of being in a pseudo, that pseudo will be marked live
1087 and hence the frame pointer will be known to be live via that
1088 pseudo. */
1089
1090 if (! frame_pointer_needed)
1091 FOR_EACH_BB_FN (bb, cfun)
1092 bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);
1093
1094 /* Come here (with failure set nonzero) if we can't get enough spill
1095 regs. */
1096 failed:
1097
1098 CLEAR_REG_SET (&changed_allocation_pseudos);
1099 CLEAR_REG_SET (&spilled_pseudos);
1100 reload_in_progress = 0;
1101
1102 /* Now eliminate all pseudo regs by modifying them into
1103 their equivalent memory references.
1104 The REG-rtx's for the pseudos are modified in place,
1105 so all insns that used to refer to them now refer to memory.
1106
1107 For a reg that has a reg_equiv_address, all those insns
1108 were changed by reloading so that no insns refer to it any longer;
1109 but the DECL_RTL of a variable decl may refer to it,
1110 and if so this causes the debugging info to mention the variable. */
1111
1112 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1113 {
1114 rtx addr = 0;
1115
1116 if (reg_equiv_mem (i))
1117 addr = XEXP (reg_equiv_mem (i), 0);
1118
1119 if (reg_equiv_address (i))
1120 addr = reg_equiv_address (i);
1121
1122 if (addr)
1123 {
1124 if (reg_renumber[i] < 0)
1125 {
1126 rtx reg = regno_reg_rtx[i];
1127
1128 REG_USERVAR_P (reg) = 0;
1129 PUT_CODE (reg, MEM);
1130 XEXP (reg, 0) = addr;
1131 if (reg_equiv_memory_loc (i))
1132 MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
1133 else
1134 MEM_ATTRS (reg) = 0;
1135 MEM_NOTRAP_P (reg) = 1;
1136 }
1137 else if (reg_equiv_mem (i))
1138 XEXP (reg_equiv_mem (i), 0) = addr;
1139 }
1140
1141 /* We don't want complex addressing modes in debug insns
1142 if simpler ones will do, so delegitimize equivalences
1143 in debug insns. */
1144 if (MAY_HAVE_DEBUG_INSNS && reg_renumber[i] < 0)
1145 {
1146 rtx reg = regno_reg_rtx[i];
1147 rtx equiv = 0;
1148 df_ref use, next;
1149
1150 if (reg_equiv_constant (i))
1151 equiv = reg_equiv_constant (i);
1152 else if (reg_equiv_invariant (i))
1153 equiv = reg_equiv_invariant (i);
1154 else if (reg && MEM_P (reg))
1155 equiv = targetm.delegitimize_address (reg);
1156 else if (reg && REG_P (reg) && (int)REGNO (reg) != i)
1157 equiv = reg;
1158
1159 if (equiv == reg)
1160 continue;
1161
1162 for (use = DF_REG_USE_CHAIN (i); use; use = next)
1163 {
1164 insn = DF_REF_INSN (use);
1165
1166 /* Make sure the next ref is for a different instruction,
1167 so that we're not affected by the rescan. */
1168 next = DF_REF_NEXT_REG (use);
1169 while (next && DF_REF_INSN (next) == insn)
1170 next = DF_REF_NEXT_REG (next);
1171
1172 if (DEBUG_INSN_P (insn))
1173 {
1174 if (!equiv)
1175 {
1176 INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC ();
1177 df_insn_rescan_debug_internal (insn);
1178 }
1179 else
1180 INSN_VAR_LOCATION_LOC (insn)
1181 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn),
1182 reg, equiv);
1183 }
1184 }
1185 }
1186 }
1187
1188 /* We must set reload_completed now since the cleanup_subreg_operands call
1189 below will re-recognize each insn and reload may have generated insns
1190 which are only valid during and after reload. */
1191 reload_completed = 1;
1192
1193 /* Make a pass over all the insns and delete all USEs which we inserted
1194 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1195 notes. Delete all CLOBBER insns, except those that refer to the return
1196 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1197 from misarranging variable-array code, and simplify (subreg (reg))
1198 operands. Strip and regenerate REG_INC notes that may have been moved
1199 around. */
1200
1201 for (insn = first; insn; insn = NEXT_INSN (insn))
1202 if (INSN_P (insn))
1203 {
1204 rtx *pnote;
1205
1206 if (CALL_P (insn))
1207 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
1208 VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
1209
1210 if ((GET_CODE (PATTERN (insn)) == USE
1211 /* We mark with QImode USEs introduced by reload itself. */
1212 && (GET_MODE (insn) == QImode
1213 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
1214 || (GET_CODE (PATTERN (insn)) == CLOBBER
1215 && (!MEM_P (XEXP (PATTERN (insn), 0))
1216 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
1217 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
1218 && XEXP (XEXP (PATTERN (insn), 0), 0)
1219 != stack_pointer_rtx))
1220 && (!REG_P (XEXP (PATTERN (insn), 0))
1221 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1222 {
1223 delete_insn (insn);
1224 continue;
1225 }
1226
1227 /* Some CLOBBERs may survive until here and still reference unassigned
1228 pseudos with const equivalent, which may in turn cause ICE in later
1229 passes if the reference remains in place. */
1230 if (GET_CODE (PATTERN (insn)) == CLOBBER)
1231 replace_pseudos_in (& XEXP (PATTERN (insn), 0),
1232 VOIDmode, PATTERN (insn));
1233
1234 /* Discard obvious no-ops, even without -O. This optimization
1235 is fast and doesn't interfere with debugging. */
1236 if (NONJUMP_INSN_P (insn)
1237 && GET_CODE (PATTERN (insn)) == SET
1238 && REG_P (SET_SRC (PATTERN (insn)))
1239 && REG_P (SET_DEST (PATTERN (insn)))
1240 && (REGNO (SET_SRC (PATTERN (insn)))
1241 == REGNO (SET_DEST (PATTERN (insn)))))
1242 {
1243 delete_insn (insn);
1244 continue;
1245 }
1246
1247 pnote = &REG_NOTES (insn);
1248 while (*pnote != 0)
1249 {
1250 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1251 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1252 || REG_NOTE_KIND (*pnote) == REG_INC)
1253 *pnote = XEXP (*pnote, 1);
1254 else
1255 pnote = &XEXP (*pnote, 1);
1256 }
1257
1258 #ifdef AUTO_INC_DEC
1259 add_auto_inc_notes (insn, PATTERN (insn));
1260 #endif
1261
1262 /* Simplify (subreg (reg)) if it appears as an operand. */
1263 cleanup_subreg_operands (insn);
1264
1265 /* Clean up invalid ASMs so that they don't confuse later passes.
1266 See PR 21299. */
1267 if (asm_noperands (PATTERN (insn)) >= 0)
1268 {
1269 extract_insn (insn);
1270 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1271 {
1272 error_for_asm (insn,
1273 "%<asm%> operand has impossible constraints");
1274 delete_insn (insn);
1275 continue;
1276 }
1277 }
1278 }
1279
1280 /* If we are doing generic stack checking, give a warning if this
1281 function's frame size is larger than we expect. */
1282 if (flag_stack_check == GENERIC_STACK_CHECK)
1283 {
1284 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1285 static int verbose_warned = 0;
1286
1287 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1288 if (df_regs_ever_live_p (i) && ! fixed_regs[i] && call_used_regs[i])
1289 size += UNITS_PER_WORD;
1290
1291 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1292 {
1293 warning (0, "frame size too large for reliable stack checking");
1294 if (! verbose_warned)
1295 {
1296 warning (0, "try reducing the number of local variables");
1297 verbose_warned = 1;
1298 }
1299 }
1300 }
1301
1302 free (temp_pseudo_reg_arr);
1303
1304 /* Indicate that we no longer have known memory locations or constants. */
1305 free_reg_equiv ();
1306
1307 free (reg_max_ref_width);
1308 free (reg_old_renumber);
1309 free (pseudo_previous_regs);
1310 free (pseudo_forbidden_regs);
1311
1312 CLEAR_HARD_REG_SET (used_spill_regs);
1313 for (i = 0; i < n_spills; i++)
1314 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1315
1316 /* Free all the insn_chain structures at once. */
1317 obstack_free (&reload_obstack, reload_startobj);
1318 unused_insn_chains = 0;
1319
1320 inserted = fixup_abnormal_edges ();
1321
1322 /* We've possibly turned single trapping insn into multiple ones. */
1323 if (cfun->can_throw_non_call_exceptions)
1324 {
1325 sbitmap blocks;
1326 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
1327 bitmap_ones (blocks);
1328 find_many_sub_basic_blocks (blocks);
1329 sbitmap_free (blocks);
1330 }
1331
1332 if (inserted)
1333 commit_edge_insertions ();
1334
1335 /* Replacing pseudos with their memory equivalents might have
1336 created shared rtx. Subsequent passes would get confused
1337 by this, so unshare everything here. */
1338 unshare_all_rtl_again (first);
1339
1340 #ifdef STACK_BOUNDARY
1341 /* init_emit has set the alignment of the hard frame pointer
1342 to STACK_BOUNDARY. It is very likely no longer valid if
1343 the hard frame pointer was used for register allocation. */
1344 if (!frame_pointer_needed)
1345 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
1346 #endif
1347
1348 substitute_stack.release ();
1349
1350 gcc_assert (bitmap_empty_p (&spilled_pseudos));
1351
1352 reload_completed = !failure;
1353
1354 return need_dce;
1355 }
1356
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */

static void
maybe_fix_stack_asms (void)
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns (asm_noperands >= 0) with a PARALLEL pattern can
	 carry the problematic CLOBBERs; skip everything else.  */
      if (! INSN_P (chain->insn)
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
			   constraints, operand_mode, NULL);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  const char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  p++;
		  /* '#' means the rest of this alternative is to be
		     ignored; skip forward to the next ',' or the end.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case 'g':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  if (insn_extra_address_constraint (cn))
		    cls = (int) reg_class_subunion[cls]
		      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
					     ADDRESS, SCRATCH)];
		  else
		    cls = (int) reg_class_subunion[cls]
		      [reg_class_for_constraint (cn)];
		  break;
		}
	      /* Some constraints are multi-character; advance past the
		 whole constraint, not just one char.  */
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
	    CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
	  }
    }

#endif
}
1463 \f
1464 /* Copy the global variables n_reloads and rld into the corresponding elts
1465 of CHAIN. */
1466 static void
1467 copy_reloads (struct insn_chain *chain)
1468 {
1469 chain->n_reloads = n_reloads;
1470 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads);
1471 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1472 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
1473 }
1474
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.  */
static void
calculate_needs_all_insns (int global)
{
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain *chain, *next = 0;

  something_needs_elimination = 0;

  /* Mark the obstack so per-insn reload data can be freed en masse.  */
  reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
  for (chain = reload_insn_chain; chain != 0; chain = next)
    {
      rtx_insn *insn = chain->insn;

      /* Fetch NEXT up front: CHAIN may be unlinked and recycled below.  */
      next = chain->next;

      /* Clear out the shortcuts.  */
      chain->n_reloads = 0;
      chain->need_elim = 0;
      chain->need_reload = 0;
      chain->need_operand_change = 0;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	 what effects this has on the known offsets at labels.  */

      if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	  || (INSN_P (insn) && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (INSN_P (insn))
	{
	  /* Save the pattern/code/notes so elimination can be undone
	     further down if it turned out to be needed.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* Skip insns that only set an equivalence.  */
	  if (will_delete_init_insn_p (insn))
	    continue;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable || num_eliminable_invariants)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      /* A no-op is either reg = same reg (pseudo), or a copy
		 between two unassigned pseudos with identical memory
		 equivalences.  */
	      if (set
		  &&
		  ((SET_SRC (set) == SET_DEST (set)
		    && REG_P (SET_SRC (set))
		    && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		   || (REG_P (SET_SRC (set)) && REG_P (SET_DEST (set))
		       && reg_renumber[REGNO (SET_SRC (set))] < 0
		       && reg_renumber[REGNO (SET_DEST (set))] < 0
		       && reg_equiv_memory_loc (REGNO (SET_SRC (set))) != NULL
		       && reg_equiv_memory_loc (REGNO (SET_DEST (set))) != NULL
		       && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set))),
				       reg_equiv_memory_loc (REGNO (SET_DEST (set)))))))
		{
		  if (ira_conflicts_p)
		    /* Inform IRA about the insn deletion.  */
		    ira_mark_memory_move_deletion (REGNO (SET_DEST (set)),
						   REGNO (SET_SRC (set)));
		  delete_insn (insn);
		  /* Delete it from the reload chain.  */
		  if (chain->prev)
		    chain->prev->next = next;
		  else
		    reload_insn_chain = next;
		  if (next)
		    next->prev = chain->prev;
		  chain->next = unused_insn_chains;
		  unused_insn_chains = chain;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_insn_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      copy_reloads (chain);
	      /* Append CHAIN to the insns_need_reload list.  */
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;
	    }
	}
    }
  /* Terminate the insns_need_reload list.  */
  *pprev_reload = 0;
}
1598 \f
/* This function is called from the register allocator to set up estimates
   for the cost of eliminating pseudos which have REG_EQUIV equivalences to
   an invariant.  The structure is similar to calculate_needs_all_insns.  */

void
calculate_elim_costs_all_insns (void)
{
  int *reg_equiv_init_cost;
  basic_block bb;
  int i;

  /* Per-pseudo accumulated cost of materializing its equivalence.  */
  reg_equiv_init_cost = XCNEWVEC (int, max_regno);
  init_elim_table ();
  init_eliminable_invariants (get_insns (), false);

  set_initial_elim_offsets ();
  set_initial_label_offsets ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      elim_bb = bb;

      FOR_BB_INSNS (bb, insn)
	{
	  /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	     include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
	     what effects this has on the known offsets at labels.  */

	  if (LABEL_P (insn) || JUMP_P (insn) || JUMP_TABLE_DATA_P (insn)
	      || (INSN_P (insn) && REG_NOTES (insn) != 0))
	    set_label_offsets (insn, insn, 0);

	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Skip insns that only set an equivalence.  */
	      if (set && REG_P (SET_DEST (set))
		  && reg_renumber[REGNO (SET_DEST (set))] < 0
		  && (reg_equiv_constant (REGNO (SET_DEST (set)))
		      || reg_equiv_invariant (REGNO (SET_DEST (set)))))
		{
		  unsigned regno = REGNO (SET_DEST (set));
		  rtx init = reg_equiv_init (regno);
		  if (init)
		    {
		      /* Cost the eliminated form of the source, weighted
			 by this block's execution frequency.  */
		      rtx t = eliminate_regs_1 (SET_SRC (set), VOIDmode, insn,
						false, true);
		      int cost = set_src_cost (t, optimize_bb_for_speed_p (bb));
		      int freq = REG_FREQ_FROM_BB (bb);

		      reg_equiv_init_cost[regno] = cost * freq;
		      continue;
		    }
		}
	      /* If needed, eliminate any eliminable registers.  */
	      if (num_eliminable || num_eliminable_invariants)
		elimination_costs_in_insn (insn);

	      if (num_eliminable)
		update_eliminable_offsets ();
	    }
	}
    }
  /* Report the accumulated costs back to IRA.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_equiv_invariant (i))
	{
	  if (reg_equiv_init (i))
	    {
	      int cost = reg_equiv_init_cost[i];
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d has equivalence, initial gains %d\n", i, cost);
	      if (cost != 0)
		ira_adjust_equiv_reg_cost (i, cost);
	    }
	  else
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "Reg %d had equivalence, but can't be eliminated\n",
			 i);
	      ira_adjust_equiv_reg_cost (i, 0);
	    }
	}
    }

  /* Release the cost array and the per-label offset tables.  */
  free (reg_equiv_init_cost);
  free (offsets_known_at);
  free (offsets_at);
  offsets_at = NULL;
  offsets_known_at = NULL;
}
1694 \f
1695 /* Comparison function for qsort to decide which of two reloads
1696 should be handled first. *P1 and *P2 are the reload numbers. */
1697
1698 static int
1699 reload_reg_class_lower (const void *r1p, const void *r2p)
1700 {
1701 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1702 int t;
1703
1704 /* Consider required reloads before optional ones. */
1705 t = rld[r1].optional - rld[r2].optional;
1706 if (t != 0)
1707 return t;
1708
1709 /* Count all solitary classes before non-solitary ones. */
1710 t = ((reg_class_size[(int) rld[r2].rclass] == 1)
1711 - (reg_class_size[(int) rld[r1].rclass] == 1));
1712 if (t != 0)
1713 return t;
1714
1715 /* Aside from solitaires, consider all multi-reg groups first. */
1716 t = rld[r2].nregs - rld[r1].nregs;
1717 if (t != 0)
1718 return t;
1719
1720 /* Consider reloads in order of increasing reg-class number. */
1721 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1722 if (t != 0)
1723 return t;
1724
1725 /* If reloads are equally urgent, sort by reload number,
1726 so that the results of qsort leave nothing to chance. */
1727 return r1 - r2;
1728 }
1729 \f
/* The cost of spilling each hard reg.  */
static int spill_cost[FIRST_PSEUDO_REGISTER];

/* When spilling multiple hard registers, we use SPILL_COST for the first
   spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
   is accumulated only against the first hard reg of a multi-reg pseudo.  */
static int spill_add_cost[FIRST_PSEUDO_REGISTER];

/* Map of hard regno to pseudo regno currently occupying the hard
   reg (-1 when unoccupied).  */
static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER];
1741
1742 /* Update the spill cost arrays, considering that pseudo REG is live. */
1743
1744 static void
1745 count_pseudo (int reg)
1746 {
1747 int freq = REG_FREQ (reg);
1748 int r = reg_renumber[reg];
1749 int nregs;
1750
1751 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1752 if (ira_conflicts_p && r < 0)
1753 return;
1754
1755 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1756 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1757 return;
1758
1759 SET_REGNO_REG_SET (&pseudos_counted, reg);
1760
1761 gcc_assert (r >= 0);
1762
1763 spill_add_cost[r] += freq;
1764 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1765 while (nregs-- > 0)
1766 {
1767 hard_regno_to_pseudo_regno[r + nregs] = reg;
1768 spill_cost[r + nregs] += freq;
1769 }
1770 }
1771
/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
   contents of BAD_SPILL_REGS for the insn described by CHAIN.  */

static void
order_regs_for_reload (struct insn_chain *chain)
{
  unsigned i;
  HARD_REG_SET used_by_pseudos;
  HARD_REG_SET used_by_pseudos2;
  reg_set_iterator rsi;

  /* Fixed registers can never be used for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);

  /* Reset the per-insn cost/occupancy state.  */
  memset (spill_cost, 0, sizeof spill_cost);
  memset (spill_add_cost, 0, sizeof spill_add_cost);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    hard_regno_to_pseudo_regno[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  First exclude hard registers
     that are live in or across this insn.  */

  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
  IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);

  /* Now find out which pseudos are allocated to it, and update
     hard_reg_n_uses.  */
  CLEAR_REG_SET (&pseudos_counted);

  /* Fold in the cost of every pseudo live through or set/dying in this
     insn; count_pseudo skips duplicates via pseudos_counted.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
    {
      count_pseudo (i);
    }
  CLEAR_REG_SET (&pseudos_counted);
}
1815 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
static short reload_order[MAX_RELOADS];

/* This is used to keep track of the spill regs used in one insn.  */
static HARD_REG_SET used_spill_regs_local;
1822
1823 /* We decided to spill hard register SPILLED, which has a size of
1824 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1825 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1826 update SPILL_COST/SPILL_ADD_COST. */
1827
1828 static void
1829 count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1830 {
1831 int freq = REG_FREQ (reg);
1832 int r = reg_renumber[reg];
1833 int nregs;
1834
1835 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1836 if (ira_conflicts_p && r < 0)
1837 return;
1838
1839 gcc_assert (r >= 0);
1840
1841 nregs = hard_regno_nregs[r][PSEUDO_REGNO_MODE (reg)];
1842
1843 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1844 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1845 return;
1846
1847 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1848
1849 spill_add_cost[r] -= freq;
1850 while (nregs-- > 0)
1851 {
1852 hard_regno_to_pseudo_regno[r + nregs] = -1;
1853 spill_cost[r + nregs] -= freq;
1854 }
1855 }
1856
/* Find reload register to use for reload number ORDER within CHAIN.
   On success, record the chosen hard register in the reload descriptor,
   mark affected pseudos as spilled, claim the registers in
   used_spill_regs_local, and return 1.  Return 0 if no suitable
   register exists.  */

static int
find_reg (struct insn_chain *chain, int order)
{
  int rnum = reload_order[order];
  struct reload *rl = rld + rnum;
  int best_cost = INT_MAX;
  int best_reg = -1;
  unsigned int i, j, n;
  int k;
  /* Hard regs that may not be used for this reload at all.  */
  HARD_REG_SET not_usable;
  /* Hard regs already claimed by earlier, conflicting reloads.  */
  HARD_REG_SET used_by_other_reload;
  reg_set_iterator rsi;
  /* Scratch arrays for the IRA-guided choice below; static to avoid
     large stack frames.  Reload is not reentrant, so this is safe.  */
  static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER];
  static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER];

  /* A usable register must not be globally or locally bad, and must
     belong to the reload's register class.  */
  COPY_HARD_REG_SET (not_usable, bad_spill_regs);
  IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
  IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->rclass]);

  /* Collect the registers already given to earlier reloads that
     conflict with this one.  */
  CLEAR_HARD_REG_SET (used_by_other_reload);
  for (k = 0; k < order; k++)
    {
      int other = reload_order[k];

      if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
	for (j = 0; j < rld[other].nregs; j++)
	  SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
    }

  /* Scan all hard registers (in allocation order if available) and pick
     the cheapest usable candidate.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      unsigned int regno = reg_alloc_order[i];
#else
      unsigned int regno = i;
#endif

      if (! TEST_HARD_REG_BIT (not_usable, regno)
	  && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
	  && HARD_REGNO_MODE_OK (regno, rl->mode))
	{
	  int this_cost = spill_cost[regno];
	  int ok = 1;
	  unsigned int this_nregs = hard_regno_nregs[regno][rl->mode];

	  /* A multi-register value needs this_nregs consecutive usable
	     registers; accumulate the cost of the additional ones.  */
	  for (j = 1; j < this_nregs; j++)
	    {
	      this_cost += spill_add_cost[regno + j];
	      if ((TEST_HARD_REG_BIT (not_usable, regno + j))
		  || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
		ok = 0;
	    }
	  if (! ok)
	    continue;

	  if (ira_conflicts_p)
	    {
	      /* Ask IRA to find a better pseudo-register for
		 spilling.  */
	      for (n = j = 0; j < this_nregs; j++)
		{
		  int r = hard_regno_to_pseudo_regno[regno + j];

		  if (r < 0)
		    continue;
		  /* Skip duplicates of the pseudo just recorded.  */
		  if (n == 0 || regno_pseudo_regs[n - 1] != r)
		    regno_pseudo_regs[n++] = r;
		}
	      /* -1 terminates the list for IRA.  */
	      regno_pseudo_regs[n++] = -1;
	      if (best_reg < 0
		  || ira_better_spill_reload_regno_p (regno_pseudo_regs,
						      best_regno_pseudo_regs,
						      rl->in, rl->out,
						      chain->insn))
		{
		  best_reg = regno;
		  /* Remember the pseudo list of the new best choice.  */
		  for (j = 0;; j++)
		    {
		      best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
		      if (regno_pseudo_regs[j] < 0)
			break;
		    }
		}
	      /* When IRA guides the choice, the cost comparison below
		 is not used.  */
	      continue;
	    }

	  /* Slightly prefer a register that already holds the reload's
	     input or output value.  */
	  if (rl->in && REG_P (rl->in) && REGNO (rl->in) == regno)
	    this_cost--;
	  if (rl->out && REG_P (rl->out) && REGNO (rl->out) == regno)
	    this_cost--;
	  if (this_cost < best_cost
	      /* Among registers with equal cost, prefer caller-saved ones, or
		 use REG_ALLOC_ORDER if it is defined.  */
	      || (this_cost == best_cost
#ifdef REG_ALLOC_ORDER
		  && (inv_reg_alloc_order[regno]
		      < inv_reg_alloc_order[best_reg])
#else
		  && call_used_regs[regno]
		  && ! call_used_regs[best_reg]
#endif
		  ))
	    {
	      best_reg = regno;
	      best_cost = this_cost;
	    }
	}
    }
  if (best_reg == -1)
    return 0;

  if (dump_file)
    fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);

  rl->nregs = hard_regno_nregs[best_reg][rl->mode];
  rl->regno = best_reg;

  /* Account for every live pseudo that overlaps the chosen registers.  */
  EXECUTE_IF_SET_IN_REG_SET
    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  EXECUTE_IF_SET_IN_REG_SET
    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)
    {
      count_spilled_pseudo (best_reg, rl->nregs, j);
    }

  for (i = 0; i < rl->nregs; i++)
    {
      /* count_spilled_pseudo must have zeroed all costs for the
	 registers we are taking.  */
      gcc_assert (spill_cost[best_reg + i] == 0);
      gcc_assert (spill_add_cost[best_reg + i] == 0);
      gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1);
      SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
    }
  return 1;
}
1997
1998 /* Find more reload regs to satisfy the remaining need of an insn, which
1999 is given by CHAIN.
2000 Do it by ascending class number, since otherwise a reg
2001 might be spilled for a big class and might fail to count
2002 for a smaller class even though it belongs to that class. */
2003
2004 static void
2005 find_reload_regs (struct insn_chain *chain)
2006 {
2007 int i;
2008
2009 /* In order to be certain of getting the registers we need,
2010 we must sort the reloads into order of increasing register class.
2011 Then our grabbing of reload registers will parallel the process
2012 that provided the reload registers. */
2013 for (i = 0; i < chain->n_reloads; i++)
2014 {
2015 /* Show whether this reload already has a hard reg. */
2016 if (chain->rld[i].reg_rtx)
2017 {
2018 int regno = REGNO (chain->rld[i].reg_rtx);
2019 chain->rld[i].regno = regno;
2020 chain->rld[i].nregs
2021 = hard_regno_nregs[regno][GET_MODE (chain->rld[i].reg_rtx)];
2022 }
2023 else
2024 chain->rld[i].regno = -1;
2025 reload_order[i] = i;
2026 }
2027
2028 n_reloads = chain->n_reloads;
2029 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
2030
2031 CLEAR_HARD_REG_SET (used_spill_regs_local);
2032
2033 if (dump_file)
2034 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
2035
2036 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
2037
2038 /* Compute the order of preference for hard registers to spill. */
2039
2040 order_regs_for_reload (chain);
2041
2042 for (i = 0; i < n_reloads; i++)
2043 {
2044 int r = reload_order[i];
2045
2046 /* Ignore reloads that got marked inoperative. */
2047 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
2048 && ! rld[r].optional
2049 && rld[r].regno == -1)
2050 if (! find_reg (chain, i))
2051 {
2052 if (dump_file)
2053 fprintf (dump_file, "reload failure for reload %d\n", r);
2054 spill_failure (chain->insn, rld[r].rclass);
2055 failure = 1;
2056 return;
2057 }
2058 }
2059
2060 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
2061 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
2062
2063 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2064 }
2065
2066 static void
2067 select_reload_regs (void)
2068 {
2069 struct insn_chain *chain;
2070
2071 /* Try to satisfy the needs for each insn. */
2072 for (chain = insns_need_reload; chain != 0;
2073 chain = chain->next_need_reload)
2074 find_reload_regs (chain);
2075 }
2076 \f
2077 /* Delete all insns that were inserted by emit_caller_save_insns during
2078 this iteration. */
2079 static void
2080 delete_caller_save_insns (void)
2081 {
2082 struct insn_chain *c = reload_insn_chain;
2083
2084 while (c != 0)
2085 {
2086 while (c != 0 && c->is_caller_save_insn)
2087 {
2088 struct insn_chain *next = c->next;
2089 rtx_insn *insn = c->insn;
2090
2091 if (c == reload_insn_chain)
2092 reload_insn_chain = next;
2093 delete_insn (insn);
2094
2095 if (next)
2096 next->prev = c->prev;
2097 if (c->prev)
2098 c->prev->next = next;
2099 c->next = unused_insn_chains;
2100 unused_insn_chains = c;
2101 c = next;
2102 }
2103 if (c != 0)
2104 c = c->next;
2105 }
2106 }
2107 \f
2108 /* Handle the failure to find a register to spill.
2109 INSN should be one of the insns which needed this particular spill reg. */
2110
2111 static void
2112 spill_failure (rtx_insn *insn, enum reg_class rclass)
2113 {
2114 if (asm_noperands (PATTERN (insn)) >= 0)
2115 error_for_asm (insn, "can%'t find a register in class %qs while "
2116 "reloading %<asm%>",
2117 reg_class_names[rclass]);
2118 else
2119 {
2120 error ("unable to find a register to spill in class %qs",
2121 reg_class_names[rclass]);
2122
2123 if (dump_file)
2124 {
2125 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2126 debug_reload_to_stream (dump_file);
2127 }
2128 fatal_insn ("this is the insn:", insn);
2129 }
2130 }
2131 \f
2132 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2133 data that is dead in INSN. */
2134
2135 static void
2136 delete_dead_insn (rtx_insn *insn)
2137 {
2138 rtx_insn *prev = prev_active_insn (insn);
2139 rtx prev_dest;
2140
2141 /* If the previous insn sets a register that dies in our insn make
2142 a note that we want to run DCE immediately after reload.
2143
2144 We used to delete the previous insn & recurse, but that's wrong for
2145 block local equivalences. Instead of trying to figure out the exact
2146 circumstances where we can delete the potentially dead insns, just
2147 let DCE do the job. */
2148 if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2149 && GET_CODE (PATTERN (prev)) == SET
2150 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
2151 && reg_mentioned_p (prev_dest, PATTERN (insn))
2152 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2153 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2154 need_dce = 1;
2155
2156 SET_INSN_DELETED (insn);
2157 }
2158
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.

   DONT_SHARE_P nonzero forbids reusing or sharing an existing slot.  */

static void
alter_reg (int i, int from_reg, bool dont_share_p)
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (!REG_P (regno_reg_rtx[i]))
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  SET_REGNO (regno_reg_rtx[i],
	     reg_renumber[i] >= 0 ? reg_renumber[i] : i);

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant (i) == 0
      && (reg_equiv_invariant (i) == 0
	  || reg_equiv_init (i) == 0)
      && reg_equiv_memory_loc (i) == 0)
    {
      rtx x = NULL_RTX;
      machine_mode mode = GET_MODE (regno_reg_rtx[i]);
      unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
      unsigned int inherent_align = GET_MODE_ALIGNMENT (mode);
      /* Room for the widest reference ever made to the pseudo
	 (e.g. via a paradoxical subreg).  */
      unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      unsigned int min_align = reg_max_ref_width[i] * BITS_PER_UNIT;
      /* Byte offset within the slot of the pseudo's own data;
	 only nonzero on big-endian targets.  */
      int adjust = 0;

      something_was_spilled = true;

      if (ira_conflicts_p)
	{
	  /* Mark the spill for IRA.  */
	  SET_REGNO_REG_SET (&spilled_pseudos, i);
	  if (!dont_share_p)
	    x = ira_reuse_stack_slot (i, inherent_size, total_size);
	}

      /* IRA already supplied a slot: nothing more to allocate.  */
      if (x)
	;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, than the previous slot.  */
      else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
	{
	  rtx stack_slot;

	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);

	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = inherent_size - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  if (! dont_share_p && ira_conflicts_p)
	    /* Inform IRA about allocating a new stack slot.  */
	    ira_mark_new_stack_slot (stack_slot, i, total_size);
	}

      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size)
	       && MEM_ALIGN (spill_stack_slot[from_reg]) >= min_align)
	x = spill_stack_slot[from_reg];

      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  rtx stack_slot;

	  /* Grow the requirements to also cover FROM_REG's existing
	     slot, so the new slot can serve all pseudos spilled from
	     that hard register.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	      if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
		min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
	    }

	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  min_align > inherent_align
				  || total_size > inherent_size ? -1 : 0);
	  stack_slot = x;

	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.  This is so we
	     can do a big-endian correction unconditionally below.  */
	  if (BYTES_BIG_ENDIAN)
	    {
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot
		  = adjust_address_nv (x, mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
				       adjust);
	    }

	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);

      /* Set all of the memory attributes as appropriate for a spill.  */
      set_mem_attrs_for_spill (x);

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc (i) = x;
    }
}
2320
2321 /* Mark the slots in regs_ever_live for the hard regs used by
2322 pseudo-reg number REGNO, accessed in MODE. */
2323
2324 static void
2325 mark_home_live_1 (int regno, machine_mode mode)
2326 {
2327 int i, lim;
2328
2329 i = reg_renumber[regno];
2330 if (i < 0)
2331 return;
2332 lim = end_hard_regno (mode, i);
2333 while (i < lim)
2334 df_set_regs_ever_live (i++, true);
2335 }
2336
2337 /* Mark the slots in regs_ever_live for the hard regs
2338 used by pseudo-reg number REGNO. */
2339
2340 void
2341 mark_home_live (int regno)
2342 {
2343 if (reg_renumber[regno] >= 0)
2344 mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2345 }
2346 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is nonzero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.

   May clear reg_eliminate[].can_eliminate when recorded offsets at a
   label disagree with the current ones.  */

static void
set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; their offsets are
	 always the initial ones.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = LABEL_REF_LABEL (x);

      /* ... fall through ...  */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && BARRIER_P (tem))
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_TABLE_DATA:
      /* Scan the table's label vector.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);
      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ...  */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
	 to indirectly and hence must have all eliminations at their
	 initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case PARALLEL:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the parallel or address vector must be
	 at their initial offsets.  We want the first field for PARALLEL
	 and ADDR_VEC and the second field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (SET_SRC (x), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a non-label, non-PC/RETURN arm falls
	     through to the conservative handling below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (LABEL_REF_LABEL (tem), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2499 \f
2500 /* This function examines every reg that occurs in X and adjusts the
2501 costs for its elimination which are gathered by IRA. INSN is the
2502 insn in which X occurs. We do not recurse into MEM expressions. */
2503
2504 static void
2505 note_reg_elim_costly (const_rtx x, rtx insn)
2506 {
2507 subrtx_iterator::array_type array;
2508 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2509 {
2510 const_rtx x = *iter;
2511 if (MEM_P (x))
2512 iter.skip_subrtxes ();
2513 else if (REG_P (x)
2514 && REGNO (x) >= FIRST_PSEUDO_REGISTER
2515 && reg_equiv_init (REGNO (x))
2516 && reg_equiv_invariant (REGNO (x)))
2517 {
2518 rtx t = reg_equiv_invariant (REGNO (x));
2519 rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2520 int cost = set_src_cost (new_rtx, optimize_bb_for_speed_p (elim_bb));
2521 int freq = REG_FREQ_FROM_BB (elim_bb);
2522
2523 if (cost != 0)
2524 ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2525 }
2526 }
2527 }
2528
2529 /* Scan X and replace any eliminable registers (such as fp) with a
2530 replacement (such as sp), plus an offset.
2531
2532 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2533 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2534 MEM, we are allowed to replace a sum of a register and the constant zero
2535 with the register, which we cannot do outside a MEM. In addition, we need
2536 to record the fact that a register is referenced outside a MEM.
2537
2538 If INSN is an insn, it is the insn containing X. If we replace a REG
2539 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2540 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2541 the REG is being modified.
2542
2543 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2544 That's used when we eliminate in expressions stored in notes.
2545 This means, do not set ref_outside_mem even if the reference
2546 is outside of MEMs.
2547
2548 If FOR_COSTS is true, we are being called before reload in order to
2549 estimate the costs of keeping registers with an equivalence unallocated.
2550
   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2552 replacements done assuming all offsets are at their initial values. If
2553 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2554 encounter, return the actual location so that find_reloads will do
2555 the proper thing. */
2556
2557 static rtx
2558 eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2559 bool may_use_invariant, bool for_costs)
2560 {
2561 enum rtx_code code = GET_CODE (x);
2562 struct elim_table *ep;
2563 int regno;
2564 rtx new_rtx;
2565 int i, j;
2566 const char *fmt;
2567 int copied = 0;
2568
2569 if (! current_function_decl)
2570 return x;
2571
2572 switch (code)
2573 {
2574 CASE_CONST_ANY:
2575 case CONST:
2576 case SYMBOL_REF:
2577 case CODE_LABEL:
2578 case PC:
2579 case CC0:
2580 case ASM_INPUT:
2581 case ADDR_VEC:
2582 case ADDR_DIFF_VEC:
2583 case RETURN:
2584 return x;
2585
2586 case REG:
2587 regno = REGNO (x);
2588
2589 /* First handle the case where we encounter a bare register that
2590 is eliminable. Replace it with a PLUS. */
2591 if (regno < FIRST_PSEUDO_REGISTER)
2592 {
2593 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2594 ep++)
2595 if (ep->from_rtx == x && ep->can_eliminate)
2596 return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2597
2598 }
2599 else if (reg_renumber && reg_renumber[regno] < 0
2600 && reg_equivs
2601 && reg_equiv_invariant (regno))
2602 {
2603 if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2604 return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2605 mem_mode, insn, true, for_costs);
2606 /* There exists at least one use of REGNO that cannot be
2607 eliminated. Prevent the defining insn from being deleted. */
2608 reg_equiv_init (regno) = NULL_RTX;
2609 if (!for_costs)
2610 alter_reg (regno, -1, true);
2611 }
2612 return x;
2613
2614 /* You might think handling MINUS in a manner similar to PLUS is a
2615 good idea. It is not. It has been tried multiple times and every
2616 time the change has had to have been reverted.
2617
2618 Other parts of reload know a PLUS is special (gen_reload for example)
2619 and require special code to handle code a reloaded PLUS operand.
2620
2621 Also consider backends where the flags register is clobbered by a
2622 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2623 lea instruction comes to mind). If we try to reload a MINUS, we
2624 may kill the flags register that was holding a useful value.
2625
2626 So, please before trying to handle MINUS, consider reload as a
2627 whole instead of this little section as well as the backend issues. */
2628 case PLUS:
2629 /* If this is the sum of an eliminable register and a constant, rework
2630 the sum. */
2631 if (REG_P (XEXP (x, 0))
2632 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2633 && CONSTANT_P (XEXP (x, 1)))
2634 {
2635 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2636 ep++)
2637 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2638 {
2639 /* The only time we want to replace a PLUS with a REG (this
2640 occurs when the constant operand of the PLUS is the negative
2641 of the offset) is when we are inside a MEM. We won't want
2642 to do so at other times because that would change the
2643 structure of the insn in a way that reload can't handle.
2644 We special-case the commonest situation in
2645 eliminate_regs_in_insn, so just replace a PLUS with a
2646 PLUS here, unless inside a MEM. */
2647 if (mem_mode != 0 && CONST_INT_P (XEXP (x, 1))
2648 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2649 return ep->to_rtx;
2650 else
2651 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2652 plus_constant (Pmode, XEXP (x, 1),
2653 ep->previous_offset));
2654 }
2655
2656 /* If the register is not eliminable, we are done since the other
2657 operand is a constant. */
2658 return x;
2659 }
2660
2661 /* If this is part of an address, we want to bring any constant to the
2662 outermost PLUS. We will do this by doing register replacement in
2663 our operands and seeing if a constant shows up in one of them.
2664
2665 Note that there is no risk of modifying the structure of the insn,
2666 since we only get called for its operands, thus we are either
2667 modifying the address inside a MEM, or something like an address
2668 operand of a load-address insn. */
2669
2670 {
2671 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2672 for_costs);
2673 rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2674 for_costs);
2675
2676 if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2677 {
2678 /* If one side is a PLUS and the other side is a pseudo that
2679 didn't get a hard register but has a reg_equiv_constant,
2680 we must replace the constant here since it may no longer
2681 be in the position of any operand. */
2682 if (GET_CODE (new0) == PLUS && REG_P (new1)
2683 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2684 && reg_renumber[REGNO (new1)] < 0
2685 && reg_equivs
2686 && reg_equiv_constant (REGNO (new1)) != 0)
2687 new1 = reg_equiv_constant (REGNO (new1));
2688 else if (GET_CODE (new1) == PLUS && REG_P (new0)
2689 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2690 && reg_renumber[REGNO (new0)] < 0
2691 && reg_equiv_constant (REGNO (new0)) != 0)
2692 new0 = reg_equiv_constant (REGNO (new0));
2693
2694 new_rtx = form_sum (GET_MODE (x), new0, new1);
2695
2696 /* As above, if we are not inside a MEM we do not want to
2697 turn a PLUS into something else. We might try to do so here
2698 for an addition of 0 if we aren't optimizing. */
2699 if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2700 return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2701 else
2702 return new_rtx;
2703 }
2704 }
2705 return x;
2706
2707 case MULT:
2708 /* If this is the product of an eliminable register and a
2709 constant, apply the distribute law and move the constant out
2710 so that we have (plus (mult ..) ..). This is needed in order
2711 to keep load-address insns valid. This case is pathological.
2712 We ignore the possibility of overflow here. */
2713 if (REG_P (XEXP (x, 0))
2714 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2715 && CONST_INT_P (XEXP (x, 1)))
2716 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2717 ep++)
2718 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2719 {
2720 if (! mem_mode
2721 /* Refs inside notes or in DEBUG_INSNs don't count for
2722 this purpose. */
2723 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2724 || GET_CODE (insn) == INSN_LIST
2725 || DEBUG_INSN_P (insn))))
2726 ep->ref_outside_mem = 1;
2727
2728 return
2729 plus_constant (Pmode,
2730 gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2731 ep->previous_offset * INTVAL (XEXP (x, 1)));
2732 }
2733
2734 /* ... fall through ... */
2735
2736 case CALL:
2737 case COMPARE:
2738 /* See comments before PLUS about handling MINUS. */
2739 case MINUS:
2740 case DIV: case UDIV:
2741 case MOD: case UMOD:
2742 case AND: case IOR: case XOR:
2743 case ROTATERT: case ROTATE:
2744 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2745 case NE: case EQ:
2746 case GE: case GT: case GEU: case GTU:
2747 case LE: case LT: case LEU: case LTU:
2748 {
2749 rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2750 for_costs);
2751 rtx new1 = XEXP (x, 1)
2752 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2753 for_costs) : 0;
2754
2755 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2756 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2757 }
2758 return x;
2759
2760 case EXPR_LIST:
2761 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2762 if (XEXP (x, 0))
2763 {
2764 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2765 for_costs);
2766 if (new_rtx != XEXP (x, 0))
2767 {
2768 /* If this is a REG_DEAD note, it is not valid anymore.
2769 Using the eliminated version could result in creating a
2770 REG_DEAD note for the stack or frame pointer. */
2771 if (REG_NOTE_KIND (x) == REG_DEAD)
2772 return (XEXP (x, 1)
2773 ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2774 for_costs)
2775 : NULL_RTX);
2776
2777 x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2778 }
2779 }
2780
2781 /* ... fall through ... */
2782
2783 case INSN_LIST:
2784 case INT_LIST:
2785 /* Now do eliminations in the rest of the chain. If this was
2786 an EXPR_LIST, this might result in allocating more memory than is
2787 strictly needed, but it simplifies the code. */
2788 if (XEXP (x, 1))
2789 {
2790 new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2791 for_costs);
2792 if (new_rtx != XEXP (x, 1))
2793 return
2794 gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2795 }
2796 return x;
2797
2798 case PRE_INC:
2799 case POST_INC:
2800 case PRE_DEC:
2801 case POST_DEC:
2802 /* We do not support elimination of a register that is modified.
2803 elimination_effects has already make sure that this does not
2804 happen. */
2805 return x;
2806
2807 case PRE_MODIFY:
2808 case POST_MODIFY:
2809 /* We do not support elimination of a register that is modified.
2810 elimination_effects has already make sure that this does not
2811 happen. The only remaining case we need to consider here is
2812 that the increment value may be an eliminable register. */
2813 if (GET_CODE (XEXP (x, 1)) == PLUS
2814 && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2815 {
2816 rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2817 insn, true, for_costs);
2818
2819 if (new_rtx != XEXP (XEXP (x, 1), 1))
2820 return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2821 gen_rtx_PLUS (GET_MODE (x),
2822 XEXP (x, 0), new_rtx));
2823 }
2824 return x;
2825
2826 case STRICT_LOW_PART:
2827 case NEG: case NOT:
2828 case SIGN_EXTEND: case ZERO_EXTEND:
2829 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2830 case FLOAT: case FIX:
2831 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2832 case ABS:
2833 case SQRT:
2834 case FFS:
2835 case CLZ:
2836 case CTZ:
2837 case POPCOUNT:
2838 case PARITY:
2839 case BSWAP:
2840 new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2841 for_costs);
2842 if (new_rtx != XEXP (x, 0))
2843 return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2844 return x;
2845
2846 case SUBREG:
2847 /* Similar to above processing, but preserve SUBREG_BYTE.
2848 Convert (subreg (mem)) to (mem) if not paradoxical.
2849 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2850 pseudo didn't get a hard reg, we must replace this with the
2851 eliminated version of the memory location because push_reload
2852 may do the replacement in certain circumstances. */
2853 if (REG_P (SUBREG_REG (x))
2854 && !paradoxical_subreg_p (x)
2855 && reg_equivs
2856 && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2857 {
2858 new_rtx = SUBREG_REG (x);
2859 }
2860 else
2861 new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2862
2863 if (new_rtx != SUBREG_REG (x))
2864 {
2865 int x_size = GET_MODE_SIZE (GET_MODE (x));
2866 int new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2867
2868 if (MEM_P (new_rtx)
2869 && ((x_size < new_size
2870 #ifdef WORD_REGISTER_OPERATIONS
2871 /* On these machines, combine can create rtl of the form
2872 (set (subreg:m1 (reg:m2 R) 0) ...)
2873 where m1 < m2, and expects something interesting to
2874 happen to the entire word. Moreover, it will use the
2875 (reg:m2 R) later, expecting all bits to be preserved.
2876 So if the number of words is the same, preserve the
2877 subreg so that push_reload can see it. */
2878 && ! ((x_size - 1) / UNITS_PER_WORD
2879 == (new_size -1 ) / UNITS_PER_WORD)
2880 #endif
2881 )
2882 || x_size == new_size)
2883 )
2884 return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2885 else
2886 return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2887 }
2888
2889 return x;
2890
2891 case MEM:
2892 /* Our only special processing is to pass the mode of the MEM to our
2893 recursive call and copy the flags. While we are here, handle this
2894 case more efficiently. */
2895
2896 new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2897 for_costs);
2898 if (for_costs
2899 && memory_address_p (GET_MODE (x), XEXP (x, 0))
2900 && !memory_address_p (GET_MODE (x), new_rtx))
2901 note_reg_elim_costly (XEXP (x, 0), insn);
2902
2903 return replace_equiv_address_nv (x, new_rtx);
2904
2905 case USE:
2906 /* Handle insn_list USE that a call to a pure function may generate. */
2907 new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2908 for_costs);
2909 if (new_rtx != XEXP (x, 0))
2910 return gen_rtx_USE (GET_MODE (x), new_rtx);
2911 return x;
2912
2913 case CLOBBER:
2914 case ASM_OPERANDS:
2915 gcc_assert (insn && DEBUG_INSN_P (insn));
2916 break;
2917
2918 case SET:
2919 gcc_unreachable ();
2920
2921 default:
2922 break;
2923 }
2924
2925 /* Process each of our operands recursively. If any have changed, make a
2926 copy of the rtx. */
2927 fmt = GET_RTX_FORMAT (code);
2928 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2929 {
2930 if (*fmt == 'e')
2931 {
2932 new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2933 for_costs);
2934 if (new_rtx != XEXP (x, i) && ! copied)
2935 {
2936 x = shallow_copy_rtx (x);
2937 copied = 1;
2938 }
2939 XEXP (x, i) = new_rtx;
2940 }
2941 else if (*fmt == 'E')
2942 {
2943 int copied_vec = 0;
2944 for (j = 0; j < XVECLEN (x, i); j++)
2945 {
2946 new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2947 for_costs);
2948 if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2949 {
2950 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2951 XVEC (x, i)->elem);
2952 if (! copied)
2953 {
2954 x = shallow_copy_rtx (x);
2955 copied = 1;
2956 }
2957 XVEC (x, i) = new_v;
2958 copied_vec = 1;
2959 }
2960 XVECEXP (x, i, j) = new_rtx;
2961 }
2962 }
2963 }
2964
2965 return x;
2966 }
2967
/* Public entry point for register elimination on X: a thin wrapper
   around eliminate_regs_1 with both boolean flags false, i.e. no
   full-substitution request and no cost accounting (the FOR_COSTS
   path is used only by elimination_costs_in_insn).  MEM_MODE is the
   mode of an enclosing MEM (VOIDmode if none) and INSN the containing
   insn, both passed through unchanged.  */

rtx
eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
{
  return eliminate_regs_1 (x, mem_mode, insn, false, false);
}
2973
2974 /* Scan rtx X for modifications of elimination target registers. Update
2975 the table of eliminables to reflect the changed state. MEM_MODE is
2976 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2977
static void
elimination_effects (rtx x, machine_mode mem_mode)
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  int i, j;
  const char *fmt;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* Leaf rtxes that cannot contain an eliminable register.  */
      return;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare hard register
	 that is eliminable: record whether it was referenced outside
	 a MEM, since such a reference combined with a changed offset
	 later disables the elimination (see eliminate_regs_in_insn).  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return;
	      }

	}
      /* A spilled pseudo with a non-invariant constant equivalence:
	 the equivalence may itself mention eliminable registers, so
	 recurse into it.  */
      else if (reg_renumber[regno] < 0
	       && reg_equivs
	       && reg_equiv_constant (regno)
	       && ! function_invariant_p (reg_equiv_constant (regno)))
	elimination_effects (reg_equiv_constant (regno), mem_mode);
      return;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* If we modify the source of an elimination rule, disable it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      /* If we modify the target of an elimination rule by adding a constant,
	 update its offset.  If we modify the target in any other way, we'll
	 have to disable the rule as well.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else if (code == PRE_INC || code == POST_INC)
	      ep->offset -= size;
	    else if (code == PRE_MODIFY || code == POST_MODIFY)
	      {
		/* Only (reg = reg + const) modifications can be tracked
		   by adjusting the offset; anything else kills the rule.  */
		if (GET_CODE (XEXP (x, 1)) == PLUS
		    && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
		    && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
		  ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
		else
		  ep->can_eliminate = 0;
	      }
	  }

      /* These two aren't unary operators.  */
      if (code == POST_MODIFY || code == PRE_MODIFY)
	break;

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
    case CLZ:
    case CTZ:
    case POPCOUNT:
    case PARITY:
    case BSWAP:
      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SUBREG:
      /* A non-paradoxical subreg of a pseudo with a memory equivalence
	 will be handled via that equivalence; nothing to record here.  */
      if (REG_P (SUBREG_REG (x))
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equivs
	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
	return;

      elimination_effects (SUBREG_REG (x), mem_mode);
      return;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      elimination_effects (XEXP (x, 0), mem_mode);
      return;

    case SET:
      /* Check for setting a register that we know about.  */
      if (REG_P (SET_DEST (x)))
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && CONST_INT_P (XEXP (src, 1)))
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }
	}

      elimination_effects (SET_DEST (x), VOIDmode);
      elimination_effects (SET_SRC (x), VOIDmode);
      return;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call.  */
      elimination_effects (XEXP (x, 0), GET_MODE (x));
      return;

    default:
      break;
    }

  /* Generic case: recurse into every rtx operand and vector element,
     propagating the enclosing MEM's mode unchanged.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	elimination_effects (XEXP (x, i), mem_mode);
      else if (*fmt == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  elimination_effects (XVECEXP (x, i, j), mem_mode);
    }
}
3176
3177 /* Descend through rtx X and verify that no references to eliminable registers
3178 remain. If any do remain, mark the involved register as not
3179 eliminable. */
3180
3181 static void
3182 check_eliminable_occurrences (rtx x)
3183 {
3184 const char *fmt;
3185 int i;
3186 enum rtx_code code;
3187
3188 if (x == 0)
3189 return;
3190
3191 code = GET_CODE (x);
3192
3193 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3194 {
3195 struct elim_table *ep;
3196
3197 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3198 if (ep->from_rtx == x)
3199 ep->can_eliminate = 0;
3200 return;
3201 }
3202
3203 fmt = GET_RTX_FORMAT (code);
3204 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3205 {
3206 if (*fmt == 'e')
3207 check_eliminable_occurrences (XEXP (x, i));
3208 else if (*fmt == 'E')
3209 {
3210 int j;
3211 for (j = 0; j < XVECLEN (x, i); j++)
3212 check_eliminable_occurrences (XVECEXP (x, i, j));
3213 }
3214 }
3215 }
3216 \f
3217 /* Scan INSN and eliminate all eliminable registers in it.
3218
3219 If REPLACE is nonzero, do the replacement destructively. Also
   delete the insn as dead if it is setting an eliminable register.
3221
3222 If REPLACE is zero, do all our allocations in reload_obstack.
3223
3224 If no eliminations were done and this insn doesn't require any elimination
3225 processing (these are not identical conditions: it might be updating sp,
3226 but not referencing fp; this needs to be seen during reload_as_needed so
3227 that the offset between fp and sp can be taken into consideration), zero
3228 is returned. Otherwise, 1 is returned. */
3229
static int
eliminate_regs_in_insn (rtx_insn *insn, int replace)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;		/* Becomes 1 if anything changed or any offset moved.  */
  int i;
  rtx substed_operand[MAX_RECOG_OPERANDS];
  rtx orig_operand[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;

  /* Unrecognizable insns must be USE, CLOBBER, ASM_INPUT or debug insns;
     for a debug insn, eliminate inside its location expression and stop.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      if (DEBUG_INSN_P (insn))
	INSN_VAR_LOCATION_LOC (insn)
	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
      return 0;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx base = SET_SRC (old_set);
		rtx_insn *base_insn = insn;
		HOST_WIDE_INT offset = 0;

		/* Walk backwards through PLUS-of-constant expressions and
		   preceding single sets to find the ultimate base register,
		   accumulating the constant offset along the way.  */
		while (base != ep->to_rtx)
		  {
		    rtx_insn *prev_insn;
		    rtx prev_set;

		    if (GET_CODE (base) == PLUS
			&& CONST_INT_P (XEXP (base, 1)))
		      {
			offset += INTVAL (XEXP (base, 1));
			base = XEXP (base, 0);
		      }
		    else if ((prev_insn = prev_nonnote_insn (base_insn)) != 0
			     && (prev_set = single_set (prev_insn)) != 0
			     && rtx_equal_p (SET_DEST (prev_set), base))
		      {
			base = SET_SRC (prev_set);
			base_insn = prev_insn;
		      }
		    else
		      break;
		  }

		if (base == ep->to_rtx)
		  {
		    rtx src = plus_constant (Pmode, ep->to_rtx,
					     offset - ep->offset);

		    new_body = old_body;
		    if (! replace)
		      {
			new_body = copy_insn (old_body);
			if (REG_NOTES (insn))
			  REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		      }
		    PATTERN (insn) = new_body;
		    old_set = single_set (insn);

		    /* First see if this insn remains valid when we
		       make the change.  If not, keep the INSN_CODE
		       the same and let reload fit it up.  */
		    validate_change (insn, &SET_SRC (old_set), src, 1);
		    validate_change (insn, &SET_DEST (old_set),
				     ep->to_rtx, 1);
		    if (! apply_change_group ())
		      {
			SET_SRC (old_set) = src;
			SET_DEST (old_set) = ep->to_rtx;
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      {
		delete_dead_insn (insn);
		return 1;
	      }
	    val = 1;
	    goto done;
	  }
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}

      /* Check that the first operand of the PLUS is a hard reg or
	 the lowpart subreg of one.  */
      if (plus_cst_src)
	{
	  rtx reg = XEXP (plus_cst_src, 0);
	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
	    reg = SUBREG_REG (reg);

	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
	    plus_cst_src = 0;
	}
    }
  if (plus_cst_src)
    {
      rtx reg = XEXP (plus_cst_src, 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (plus_cst_src, 1));

      if (GET_CODE (reg) == SUBREG)
	reg = SUBREG_REG (reg);

      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == reg && ep->can_eliminate)
	  {
	    rtx to_rtx = ep->to_rtx;
	    offset += ep->offset;
	    offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));

	    if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
	      to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
				    to_rtx);
	    /* If we have a nonzero offset, and the source is already
	       a simple REG, the following transformation would
	       increase the cost of the insn by replacing a simple REG
	       with (plus (reg sp) CST).  So try only when we already
	       had a PLUS before.  */
	    if (offset == 0 || plus_src)
	      {
		rtx new_src = plus_constant (GET_MODE (to_rtx),
					     to_rtx, offset);

		new_body = old_body;
		if (! replace)
		  {
		    new_body = copy_insn (old_body);
		    if (REG_NOTES (insn))
		      REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
		  }
		PATTERN (insn) = new_body;
		old_set = single_set (insn);

		/* First see if this insn remains valid when we make the
		   change.  If not, try to replace the whole pattern with
		   a simple set (this may help if the original insn was a
		   PARALLEL that was only recognized as single_set due to
		   REG_UNUSED notes).  If this isn't valid either, keep
		   the INSN_CODE the same and let reload fix it up.  */
		if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
		  {
		    rtx new_pat = gen_rtx_SET (VOIDmode,
					       SET_DEST (old_set), new_src);

		    if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
		      SET_SRC (old_set) = new_src;
		  }
	      }
	    else
	      break;

	    val = 1;
	    /* This can't have an effect on elimination offsets, so skip right
	       to the end.  */
	    goto done;
	  }
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];
      substed_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set
	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  in_plus = false;
	  if (plus_src
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  substed_operand[i]
	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
				replace ? insn : NULL_RTX,
				is_set_src || in_plus, false);
	  if (substed_operand[i] != orig_operand[i])
	    val = 1;
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;

	  /* If an output operand changed from a REG to a MEM and INSN is an
	     insn, write a CLOBBER insn.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i])
	      && MEM_P (substed_operand[i])
	      && replace)
	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Substitute the operands; the new values are in the substed_operand
     array.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = substed_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];

  /* If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.
     If re-recognition fails, the old insn code number will still be used,
     and some register operands may have changed into PLUS expressions.
     These will be handled by find_reloads by loading them into a register
     again.  */

  if (val)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */
      new_body = old_body;
      if (! replace)
	{
	  new_body = copy_insn (old_body);
	  if (REG_NOTES (insn))
	    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
	}
      PATTERN (insn) = new_body;

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      /* ??? Why this huge if statement - why don't we just rerecognize the
	 thing always?  */
      if (! insn_is_asm
	  && old_set != 0
	  && ((REG_P (SET_SRC (old_set))
	       && (GET_CODE (new_body) != SET
		   || !REG_P (SET_SRC (new_body))))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_data.operand to the one in the insn.
		 If they are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((MEM_P (SET_SRC (old_set))
		       && SET_SRC (old_set) != recog_data.operand[1])
		      || (MEM_P (SET_DEST (old_set))
			  && SET_DEST (old_set) != recog_data.operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  int new_icode = recog (PATTERN (insn), insn, 0);
	  if (new_icode >= 0)
	    INSN_CODE (insn) = new_icode;
	}
    }

  /* Restore the old body.  If there were any changes to it, we made a copy
     of it while the changes were still in place, so we'll correctly return
     a modified insn below.  */
  if (! replace)
    {
      /* Restore the old body.  */
      for (i = 0; i < recog_data.n_operands; i++)
	/* Restoring a top-level match_parallel would clobber the new_body
	   we installed in the insn.  */
	if (recog_data.operand_loc[i] != &PATTERN (insn))
	  *recog_data.operand_loc[i] = orig_operand[i];
      for (i = 0; i < recog_data.n_dups; i++)
	*recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
    }

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn)
      = eliminate_regs_1 (REG_NOTES (insn), VOIDmode, REG_NOTES (insn), true,
			  false);

  return val;
}
3635
3636 /* Like eliminate_regs_in_insn, but only estimate costs for the use of the
3637 register allocator. INSN is the instruction we need to examine, we perform
3638 eliminations in its operands and record cases where eliminating a reg with
3639 an invariant equivalence would add extra cost. */
3640
static void
elimination_costs_in_insn (rtx_insn *insn)
{
  int icode = recog_memoized (insn);
  rtx old_body = PATTERN (insn);
  int insn_is_asm = asm_noperands (old_body) >= 0;
  rtx old_set = single_set (insn);
  int i;
  rtx orig_operand[MAX_RECOG_OPERANDS];
  rtx orig_dup[MAX_RECOG_OPERANDS];
  struct elim_table *ep;
  rtx plus_src, plus_cst_src;
  bool sets_reg_p;

  /* Unrecognizable insns must be USE, CLOBBER, ASM_INPUT or debug insns;
     none of them contributes elimination costs.  */
  if (! insn_is_asm && icode < 0)
    {
      gcc_assert (DEBUG_INSN_P (insn)
		  || GET_CODE (PATTERN (insn)) == USE
		  || GET_CODE (PATTERN (insn)) == CLOBBER
		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
      return;
    }

  if (old_set != 0 && REG_P (SET_DEST (old_set))
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  Such an insn would be
	 deleted by eliminate_regs_in_insn, so it costs nothing here.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  return;
    }

  /* We allow one special case which happens to work on all machines we
     currently support: a single set with the source or a REG_EQUAL
     note being a PLUS of an eliminable register and a constant.  */
  plus_src = plus_cst_src = 0;
  sets_reg_p = false;
  if (old_set && REG_P (SET_DEST (old_set)))
    {
      sets_reg_p = true;
      if (GET_CODE (SET_SRC (old_set)) == PLUS)
	plus_src = SET_SRC (old_set);
      /* First see if the source is of the form (plus (...) CST).  */
      if (plus_src
	  && CONST_INT_P (XEXP (plus_src, 1)))
	plus_cst_src = plus_src;
      else if (REG_P (SET_SRC (old_set))
	       || plus_src)
	{
	  /* Otherwise, see if we have a REG_EQUAL note of the form
	     (plus (...) CST).  */
	  rtx links;
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    {
	      if ((REG_NOTE_KIND (links) == REG_EQUAL
		   || REG_NOTE_KIND (links) == REG_EQUIV)
		  && GET_CODE (XEXP (links, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
		{
		  plus_cst_src = XEXP (links, 0);
		  break;
		}
	    }
	}
    }

  /* Determine the effects of this insn on elimination offsets.  */
  elimination_effects (old_body, VOIDmode);

  /* Eliminate all eliminable registers occurring in operands that
     can be handled by reload.  */
  extract_insn (insn);
  for (i = 0; i < recog_data.n_dups; i++)
    orig_dup[i] = *recog_data.dup_loc[i];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      orig_operand[i] = recog_data.operand[i];

      /* For an asm statement, every operand is eliminable.  */
      if (insn_is_asm || insn_data[icode].operand[i].eliminable)
	{
	  bool is_set_src, in_plus;

	  /* Check for setting a register that we know about.  */
	  if (recog_data.operand_type[i] != OP_IN
	      && REG_P (orig_operand[i]))
	    {
	      /* If we are assigning to a register that can be eliminated, it
		 must be as part of a PARALLEL, since the code above handles
		 single SETs.  We must indicate that we can no longer
		 eliminate this reg.  */
	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
		   ep++)
		if (ep->from_rtx == orig_operand[i])
		  ep->can_eliminate = 0;
	    }

	  /* Companion to the above plus substitution, we can allow
	     invariants as the source of a plain move.  */
	  is_set_src = false;
	  if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set))
	    is_set_src = true;
	  if (is_set_src && !sets_reg_p)
	    note_reg_elim_costly (SET_SRC (old_set), insn);
	  in_plus = false;
	  if (plus_src && sets_reg_p
	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
	    in_plus = true;

	  /* Result is discarded: we only run this for the cost notes
	     recorded via the FOR_COSTS path (last argument true).  */
	  eliminate_regs_1 (recog_data.operand[i], VOIDmode,
			    NULL_RTX,
			    is_set_src || in_plus, true);
	  /* Terminate the search in check_eliminable_occurrences at
	     this point.  */
	  *recog_data.operand_loc[i] = 0;
	}
    }

  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i]
      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];

  /* If any eliminable remain, they aren't eliminable anymore.  */
  check_eliminable_occurrences (old_body);

  /* Restore the old body.  */
  for (i = 0; i < recog_data.n_operands; i++)
    *recog_data.operand_loc[i] = orig_operand[i];
  for (i = 0; i < recog_data.n_dups; i++)
    *recog_data.dup_loc[i] = orig_dup[i];

  /* Update all elimination pairs to reflect the status after the current
     insn.  The changes we make were determined by the earlier call to
     elimination_effects.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;
    }

  return;
}
3788
3789 /* Loop through all elimination pairs.
3790 Recalculate the number not at initial offset.
3791
3792 Compute the maximum offset (minimum offset if the stack does not
3793 grow downward) for each elimination pair. */
3794
3795 static void
3796 update_eliminable_offsets (void)
3797 {
3798 struct elim_table *ep;
3799
3800 num_not_at_initial_offset = 0;
3801 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3802 {
3803 ep->previous_offset = ep->offset;
3804 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3805 num_not_at_initial_offset++;
3806 }
3807 }
3808
3809 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3810 replacement we currently believe is valid, mark it as not eliminable if X
3811 modifies DEST in any way other than by adding a constant integer to it.
3812
3813 If DEST is the frame pointer, we do nothing because we assume that
3814 all assignments to the hard frame pointer are nonlocal gotos and are being
3815 done at a time when they are valid and do not disturb anything else.
3816 Some machines want to eliminate a fake argument pointer with either the
3817 frame or stack pointer. Assignments to the hard frame pointer must not
3818 prevent this elimination.
3819
3820 Called via note_stores from reload before starting its passes to scan
3821 the insns of the function. */
3822
3823 static void
3824 mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
3825 {
3826 unsigned int i;
3827
3828 /* A SUBREG of a hard register here is just changing its mode. We should
3829 not see a SUBREG of an eliminable hard register, but check just in
3830 case. */
3831 if (GET_CODE (dest) == SUBREG)
3832 dest = SUBREG_REG (dest);
3833
3834 if (dest == hard_frame_pointer_rtx)
3835 return;
3836
3837 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3838 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3839 && (GET_CODE (x) != SET
3840 || GET_CODE (SET_SRC (x)) != PLUS
3841 || XEXP (SET_SRC (x), 0) != dest
3842 || !CONST_INT_P (XEXP (SET_SRC (x), 1))))
3843 {
3844 reg_eliminate[i].can_eliminate_previous
3845 = reg_eliminate[i].can_eliminate = 0;
3846 num_eliminable--;
3847 }
3848 }
3849
3850 /* Verify that the initial elimination offsets did not change since the
3851 last call to set_initial_elim_offsets. This is used to catch cases
3852 where something illegal happened during reload_as_needed that could
3853 cause incorrect code to be generated if we did not check for it. */
3854
static bool
verify_initial_elim_offsets (void)
{
  HOST_WIDE_INT t;

  /* With no eliminable registers there is nothing to check.  */
  if (!num_eliminable)
    return true;

#ifdef ELIMINABLE_REGS
  {
    struct elim_table *ep;

    /* Re-query the target for each pair's initial offset and compare it
       against the value cached by set_initial_elim_offsets.  */
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      {
	INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
	if (t != ep->initial_offset)
	  return false;
      }
  }
#else
  /* Targets without ELIMINABLE_REGS only have the single FP -> SP pair.  */
  INITIAL_FRAME_POINTER_OFFSET (t);
  if (t != reg_eliminate[0].initial_offset)
    return false;
#endif

  return true;
}
3882
3883 /* Reset all offsets on eliminable registers to their initial values. */
3884
static void
set_initial_elim_offsets (void)
{
  struct elim_table *ep = reg_eliminate;

#ifdef ELIMINABLE_REGS
  /* Ask the target for the offset of each eliminable pair and reset both
     the current and previous offsets to that value.  */
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
      ep->previous_offset = ep->offset = ep->initial_offset;
    }
#else
  /* Only the single frame-pointer elimination exists here.  */
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
  ep->previous_offset = ep->offset = ep->initial_offset;
#endif

  /* Everything is now at its initial offset by construction.  */
  num_not_at_initial_offset = 0;
}
3903
3904 /* Subroutine of set_initial_label_offsets called via for_each_eh_label. */
3905
3906 static void
3907 set_initial_eh_label_offset (rtx label)
3908 {
3909 set_label_offsets (label, NULL, 1);
3910 }
3911
3912 /* Initialize the known label offsets.
3913 Set a known offset for each forced label to be at the initial offset
3914 of each elimination. We do this because we assume that all
3915 computed jumps occur from a location where each elimination is
3916 at its initial offset.
3917 For all other labels, show that we don't know the offsets. */
3918
static void
set_initial_label_offsets (void)
{
  /* Start out knowing no label offsets at all.  */
  memset (offsets_known_at, 0, num_labels);

  /* Labels whose address is taken (targets of computed jumps) are assumed
     to be reached with each elimination at its initial offset.  */
  for (rtx_insn_list *x = forced_labels; x; x = x->next ())
    if (x->insn ())
      set_label_offsets (x->insn (), NULL, 1);

  /* Likewise for nonlocal goto handler labels.  */
  for (rtx_insn_list *x = nonlocal_goto_handler_labels; x; x = x->next ())
    if (x->insn ())
      set_label_offsets (x->insn (), NULL, 1);

  /* And for exception handler labels.  */
  for_each_eh_label (set_initial_eh_label_offset);
}
3934
3935 /* Set all elimination offsets to the known values for the code label given
3936 by INSN. */
3937
3938 static void
3939 set_offsets_for_label (rtx_insn *insn)
3940 {
3941 unsigned int i;
3942 int label_nr = CODE_LABEL_NUMBER (insn);
3943 struct elim_table *ep;
3944
3945 num_not_at_initial_offset = 0;
3946 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3947 {
3948 ep->offset = ep->previous_offset
3949 = offsets_at[label_nr - first_label_num][i];
3950 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3951 num_not_at_initial_offset++;
3952 }
3953 }
3954
3955 /* See if anything that happened changes which eliminations are valid.
3956 For example, on the SPARC, whether or not the frame pointer can
3957 be eliminated can depend on what registers have been used. We need
3958 not check some conditions again (such as flag_omit_frame_pointer)
3959 since they can't have changed. */
3960
static void
update_eliminables (HARD_REG_SET *pset)
{
  int previous_frame_pointer_needed = frame_pointer_needed;
  struct elim_table *ep;

  /* First, re-ask the target whether each elimination is still allowed;
     register usage discovered during reload may have changed the answer.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM
         && targetm.frame_pointer_required ())
#ifdef ELIMINABLE_REGS
	|| ! targetm.can_eliminate (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* A surviving FP elimination (to something other than the hard FP,
	 and with no stack realignment blocking it) means the frame
	 pointer is not needed after all.  */
      if (ep->can_eliminate
	  && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM
	  && (! SUPPORTS_STACK_ALIGNMENT
	      || ! crtl->stack_realign_needed))
	frame_pointer_needed = 0;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  /* Report the newly non-eliminable register to the caller so it
	     gets spilled.  */
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
}
4038
4039 /* Call update_eliminables an spill any registers we can't eliminate anymore.
4040 Return true iff a register was spilled. */
4041
4042 static bool
4043 update_eliminables_and_spill (void)
4044 {
4045 int i;
4046 bool did_spill = false;
4047 HARD_REG_SET to_spill;
4048 CLEAR_HARD_REG_SET (to_spill);
4049 update_eliminables (&to_spill);
4050 AND_COMPL_HARD_REG_SET (used_spill_regs, to_spill);
4051
4052 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4053 if (TEST_HARD_REG_BIT (to_spill, i))
4054 {
4055 spill_hard_reg (i, 1);
4056 did_spill = true;
4057
4058 /* Regardless of the state of spills, if we previously had
4059 a register that we thought we could eliminate, but now can
4060 not eliminate, we must run another pass.
4061
4062 Consider pseudos which have an entry in reg_equiv_* which
4063 reference an eliminable register. We must make another pass
4064 to update reg_equiv_* so that we do not substitute in the
4065 old value from when we thought the elimination could be
4066 performed. */
4067 }
4068 return did_spill;
4069 }
4070
4071 /* Return true if X is used as the target register of an elimination. */
4072
4073 bool
4074 elimination_target_reg_p (rtx x)
4075 {
4076 struct elim_table *ep;
4077
4078 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
4079 if (ep->to_rtx == x && ep->can_eliminate)
4080 return true;
4081
4082 return false;
4083 }
4084
4085 /* Initialize the table of registers to eliminate.
4086 Pre-condition: global flag frame_pointer_needed has been set before
4087 calling this function. */
4088
static void
init_elim_table (void)
{
  struct elim_table *ep;
#ifdef ELIMINABLE_REGS
  const struct elim_table_1 *ep1;
#endif

  /* Allocate the working table lazily on first use.  */
  if (!reg_eliminate)
    reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* Seed each entry from the static target-supplied table, asking the
     target whether the elimination is allowed.  An elimination into the
     stack pointer is additionally disabled when a frame pointer is
     needed (unless stack realignment permits it anyway).  */
  for (ep = reg_eliminate, ep1 = reg_eliminate_1;
       ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
    {
      ep->from = ep1->from;
      ep->to = ep1->to;
      ep->can_eliminate = ep->can_eliminate_previous
	= (targetm.can_eliminate (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM
		 && frame_pointer_needed
		 && (! SUPPORTS_STACK_ALIGNMENT
		     || ! stack_realign_fp)));
    }
#else
  /* Only the single FP elimination exists on this target.  */
  reg_eliminate[0].from = reg_eliminate_1[0].from;
  reg_eliminate[0].to = reg_eliminate_1[0].to;
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx_REG will cause, e.g.,
     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
4133
4134 /* Find all the pseudo registers that didn't get hard regs
4135 but do have known equivalent constants or memory slots.
4136 These include parameters (known equivalent to parameter slots)
4137 and cse'd or loop-moved constant memory addresses.
4138
4139 Record constant equivalents in reg_equiv_constant
4140 so they will be substituted by find_reloads.
4141 Record memory equivalents in reg_mem_equiv so they can
4142 be substituted eventually by altering the REG-rtx's. */
4143
static void
init_eliminable_invariants (rtx_insn *first, bool do_subregs)
{
  int i;
  rtx_insn *insn;

  grow_reg_equivs ();
  /* reg_max_ref_width is only needed when tracking paradoxical subregs.  */
  if (do_subregs)
    reg_max_ref_width = XCNEWVEC (unsigned int, max_regno);
  else
    reg_max_ref_width = NULL;

  num_eliminable_invariants = 0;

  first_label_num = get_first_label_num ();
  num_labels = max_label_num () - first_label_num;

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = XNEWVEC (char, num_labels);
  offsets_at = (HOST_WIDE_INT (*)[NUM_ELIMINABLE_REGS]) xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (HOST_WIDE_INT));

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent
     to.  If DO_SUBREGS is true, also find all paradoxical subregs and
     find largest such for each pseudo.  FIRST is the head of the insn
     list.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      /* We may introduce USEs that we want to remove at the end, so
	 we'll mark them with QImode.  Make sure there are no
	 previously-marked insns left by say regmove.  */
      if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && GET_MODE (insn) != VOIDmode)
	PUT_MODE (insn, VOIDmode);

      if (do_subregs && NONDEBUG_INSN_P (insn))
	scan_paradoxical_subregs (PATTERN (insn));

      if (set != 0 && REG_P (SET_DEST (set)))
	{
	  rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
	  rtx x;

	  if (! note)
	    continue;

	  i = REGNO (SET_DEST (set));
	  x = XEXP (note, 0);

	  /* Only pseudos (not hard or virtual registers) can carry a
	     usable equivalence here.  */
	  if (i <= LAST_VIRTUAL_REGISTER)
	    continue;

	  /* If flag_pic and we have constant, verify it's legitimate.  */
	  if (!CONSTANT_P (x)
	      || !flag_pic || LEGITIMATE_PIC_OPERAND_P (x))
	    {
	      /* It can happen that a REG_EQUIV note contains a MEM
		 that is not a legitimate memory operand.  As later
		 stages of reload assume that all addresses found
		 in the reg_equiv_* arrays were originally legitimate,
		 we ignore such REG_EQUIV notes.  */
	      if (memory_operand (x, VOIDmode))
		{
		  /* Always unshare the equivalence, so we can
		     substitute into this insn without touching the
		     equivalence.  */
		  reg_equiv_memory_loc (i) = copy_rtx (x);
		}
	      else if (function_invariant_p (x))
		{
		  machine_mode mode;

		  mode = GET_MODE (SET_DEST (set));
		  if (GET_CODE (x) == PLUS)
		    {
		      /* This is PLUS of frame pointer and a constant,
			 and might be shared.  Unshare it.  */
		      reg_equiv_invariant (i) = copy_rtx (x);
		      num_eliminable_invariants++;
		    }
		  else if (x == frame_pointer_rtx || x == arg_pointer_rtx)
		    {
		      reg_equiv_invariant (i) = x;
		      num_eliminable_invariants++;
		    }
		  else if (targetm.legitimate_constant_p (mode, x))
		    reg_equiv_constant (i) = x;
		  else
		    {
		      /* The constant is not directly legitimate; put it
			 in the constant pool instead.  */
		      reg_equiv_memory_loc (i) = force_const_mem (mode, x);
		      if (! reg_equiv_memory_loc (i))
			reg_equiv_init (i) = NULL_RTX;
		    }
		}
	      else
		{
		  reg_equiv_init (i) = NULL_RTX;
		  continue;
		}
	    }
	  else
	    reg_equiv_init (i) = NULL_RTX;
	}
    }

  /* Dump the recorded initializing insns for each equivalenced pseudo.  */
  if (dump_file)
    for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      if (reg_equiv_init (i))
	{
	  fprintf (dump_file, "init_insns for %u: ", i);
	  print_inline_rtx (dump_file, reg_equiv_init (i), 20);
	  fprintf (dump_file, "\n");
	}
}
4260
4261 /* Indicate that we no longer have known memory locations or constants.
4262 Free all data involved in tracking these. */
4263
4264 static void
4265 free_reg_equiv (void)
4266 {
4267 int i;
4268
4269 free (offsets_known_at);
4270 free (offsets_at);
4271 offsets_at = 0;
4272 offsets_known_at = 0;
4273
4274 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4275 if (reg_equiv_alt_mem_list (i))
4276 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
4277 vec_free (reg_equivs);
4278 }
4279 \f
/* Kick all pseudos out of hard register REGNO.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In the case, no pseudos
   are allowed to be in the register, even if they are only in a block that
   doesn't require spill registers, unlike the case when we are spilling this
   hard reg to produce another spill register.

   Any pseudo that needed to be kicked out is recorded in
   spilled_pseudos.  */

static void
spill_hard_reg (unsigned int regno, int cant_eliminate)
{
  int i;

  if (cant_eliminate)
    {
      /* REGNO must never be chosen as a spill register again, and its use
	 must be reflected in the register-liveness bookkeeping.  */
      SET_HARD_REG_BIT (bad_spill_regs_global, regno);
      df_set_regs_ever_live (regno, true);
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& (unsigned int) reg_renumber[i] <= regno
	&& end_hard_regno (PSEUDO_REGNO_MODE (i), reg_renumber[i]) > regno)
      SET_REGNO_REG_SET (&spilled_pseudos, i);
}
4310
4311 /* After find_reload_regs has been run for all insn that need reloads,
4312 and/or spill_hard_regs was called, this function is used to actually
4313 spill pseudo registers and try to reallocate them. It also sets up the
4314 spill_regs array for use by choose_reload_regs. */
4315
static int
finish_spills (int global)
{
  struct insn_chain *chain;
  int something_changed = 0;	/* Returned: nonzero if another pass is needed.  */
  unsigned i;
  reg_set_iterator rsi;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	if (num_eliminable && ! df_regs_ever_live_p (i))
	  something_changed = 1;
	df_set_regs_ever_live (i, true);
      }
    else
      spill_reg_order[i] = -1;

  EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)
    if (! ira_conflicts_p || reg_renumber[i] >= 0)
      {
	/* Record the current hard register the pseudo is allocated to
	   in pseudo_previous_regs so we avoid reallocating it to the
	   same hard reg in a later pass.  */
	gcc_assert (reg_renumber[i] >= 0);

	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	if (ira_conflicts_p)
	  /* Inform IRA about the change.  */
	  ira_mark_allocation_change (i);
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global && ira_conflicts_p)
    {
      unsigned int n;

      memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	  EXECUTE_IF_SET_IN_REG_SET
	    (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)
	    {
	      IOR_HARD_REG_SET (pseudo_forbidden_regs[i],
				chain->used_spill_regs);
	    }
	}

      /* Retry allocating the pseudos spilled in IRA and the
	 reload.  For each reg, merge the various reg sets that
	 indicate which hard regs can't be used, and call
	 ira_reassign_pseudos.  */
      for (n = 0, i = FIRST_PSEUDO_REGISTER; i < (unsigned) max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    if (reg_renumber[i] < 0)
	      temp_pseudo_reg_arr[n++] = i;
	    else
	      CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
	  }
      if (ira_reassign_pseudos (temp_pseudo_reg_arr, n,
				bad_spill_regs_global,
				pseudo_forbidden_regs, pseudo_previous_regs,
				&spilled_pseudos))
	something_changed = 1;
    }
  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      if (! ira_conflicts_p)
	{
	  /* Don't do it for IRA because IRA and the reload still can
	     assign hard registers to the spilled pseudos on next
	     reload iterations.  */
	  AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
	  AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
	}
      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
	  compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
	  /* Value of chain->used_spill_regs from previous iteration
	     may be not included in the value calculated here because
	     of possible removing caller-saves insns (see function
	     delete_caller_save_insns).  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
	}
    }

  CLEAR_REG_SET (&changed_allocation_pseudos);
  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (unsigned)max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      SET_REGNO_REG_SET (&changed_allocation_pseudos, i);

      alter_reg (i, reg_old_renumber[i], false);
      reg_old_renumber[i] = regno;
      if (dump_file)
	{
	  if (regno == -1)
	    fprintf (dump_file, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dump_file, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
4469 \f
4470 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
4471
4472 static void
4473 scan_paradoxical_subregs (rtx x)
4474 {
4475 int i;
4476 const char *fmt;
4477 enum rtx_code code = GET_CODE (x);
4478
4479 switch (code)
4480 {
4481 case REG:
4482 case CONST:
4483 case SYMBOL_REF:
4484 case LABEL_REF:
4485 CASE_CONST_ANY:
4486 case CC0:
4487 case PC:
4488 case USE:
4489 case CLOBBER:
4490 return;
4491
4492 case SUBREG:
4493 if (REG_P (SUBREG_REG (x))
4494 && (GET_MODE_SIZE (GET_MODE (x))
4495 > reg_max_ref_width[REGNO (SUBREG_REG (x))]))
4496 {
4497 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4498 = GET_MODE_SIZE (GET_MODE (x));
4499 mark_home_live_1 (REGNO (SUBREG_REG (x)), GET_MODE (x));
4500 }
4501 return;
4502
4503 default:
4504 break;
4505 }
4506
4507 fmt = GET_RTX_FORMAT (code);
4508 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4509 {
4510 if (fmt[i] == 'e')
4511 scan_paradoxical_subregs (XEXP (x, i));
4512 else if (fmt[i] == 'E')
4513 {
4514 int j;
4515 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4516 scan_paradoxical_subregs (XVECEXP (x, i, j));
4517 }
4518 }
4519 }
4520
4521 /* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload.
4522 If *OP_PTR is a paradoxical subreg, try to remove that subreg
4523 and apply the corresponding narrowing subreg to *OTHER_PTR.
4524 Return true if the operands were changed, false otherwise. */
4525
4526 static bool
4527 strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr)
4528 {
4529 rtx op, inner, other, tem;
4530
4531 op = *op_ptr;
4532 if (!paradoxical_subreg_p (op))
4533 return false;
4534 inner = SUBREG_REG (op);
4535
4536 other = *other_ptr;
4537 tem = gen_lowpart_common (GET_MODE (inner), other);
4538 if (!tem)
4539 return false;
4540
4541 /* If the lowpart operation turned a hard register into a subreg,
4542 rather than simplifying it to another hard register, then the
4543 mode change cannot be properly represented. For example, OTHER
4544 might be valid in its current mode, but not in the new one. */
4545 if (GET_CODE (tem) == SUBREG
4546 && REG_P (other)
4547 && HARD_REGISTER_P (other))
4548 return false;
4549
4550 *op_ptr = inner;
4551 *other_ptr = tem;
4552 return true;
4553 }
4554 \f
4555 /* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note,
4556 examine all of the reload insns between PREV and NEXT exclusive, and
4557 annotate all that may trap. */
4558
4559 static void
4560 fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
4561 {
4562 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
4563 if (note == NULL)
4564 return;
4565 if (!insn_could_throw_p (insn))
4566 remove_note (insn, note);
4567 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next);
4568 }
4569
4570 /* Reload pseudo-registers into hard regs around each insn as needed.
4571 Additional register load insns are output before the insn that needs it
4572 and perhaps store insns after insns that modify the reloaded pseudo reg.
4573
4574 reg_last_reload_reg and reg_reloaded_contents keep track of
4575 which registers are already available in reload registers.
4576 We update these for the reloads that we perform,
4577 as the insns are scanned. */
4578
4579 static void
4580 reload_as_needed (int live_known)
4581 {
4582 struct insn_chain *chain;
4583 #if defined (AUTO_INC_DEC)
4584 int i;
4585 #endif
4586 rtx_note *marker;
4587
4588 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx);
4589 memset (spill_reg_store, 0, sizeof spill_reg_store);
4590 reg_last_reload_reg = XCNEWVEC (rtx, max_regno);
4591 INIT_REG_SET (&reg_has_output_reload);
4592 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4593 CLEAR_HARD_REG_SET (reg_reloaded_call_part_clobbered);
4594
4595 set_initial_elim_offsets ();
4596
4597 /* Generate a marker insn that we will move around. */
4598 marker = emit_note (NOTE_INSN_DELETED);
4599 unlink_insn_chain (marker, marker);
4600
4601 for (chain = reload_insn_chain; chain; chain = chain->next)
4602 {
4603 rtx_insn *prev = 0;
4604 rtx_insn *insn = chain->insn;
4605 rtx_insn *old_next = NEXT_INSN (insn);
4606 #ifdef AUTO_INC_DEC
4607 rtx_insn *old_prev = PREV_INSN (insn);
4608 #endif
4609
4610 if (will_delete_init_insn_p (insn))
4611 continue;
4612
4613 /* If we pass a label, copy the offsets from the label information
4614 into the current offsets of each elimination. */
4615 if (LABEL_P (insn))
4616 set_offsets_for_label (insn);
4617
4618 else if (INSN_P (insn))
4619 {
4620 regset_head regs_to_forget;
4621 INIT_REG_SET (&regs_to_forget);
4622 note_stores (PATTERN (insn), forget_old_reloads_1, &regs_to_forget);
4623
4624 /* If this is a USE and CLOBBER of a MEM, ensure that any
4625 references to eliminable registers have been removed. */
4626
4627 if ((GET_CODE (PATTERN (insn)) == USE
4628 || GET_CODE (PATTERN (insn)) == CLOBBER)
4629 && MEM_P (XEXP (PATTERN (insn), 0)))
4630 XEXP (XEXP (PATTERN (insn), 0), 0)
4631 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4632 GET_MODE (XEXP (PATTERN (insn), 0)),
4633 NULL_RTX);
4634
4635 /* If we need to do register elimination processing, do so.
4636 This might delete the insn, in which case we are done. */
4637 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
4638 {
4639 eliminate_regs_in_insn (insn, 1);
4640 if (NOTE_P (insn))
4641 {
4642 update_eliminable_offsets ();
4643 CLEAR_REG_SET (&regs_to_forget);
4644 continue;
4645 }
4646 }
4647
4648 /* If need_elim is nonzero but need_reload is zero, one might think
4649 that we could simply set n_reloads to 0. However, find_reloads
4650 could have done some manipulation of the insn (such as swapping
4651 commutative operands), and these manipulations are lost during
4652 the first pass for every insn that needs register elimination.
4653 So the actions of find_reloads must be redone here. */
4654
4655 if (! chain->need_elim && ! chain->need_reload
4656 && ! chain->need_operand_change)
4657 n_reloads = 0;
4658 /* First find the pseudo regs that must be reloaded for this insn.
4659 This info is returned in the tables reload_... (see reload.h).
4660 Also modify the body of INSN by substituting RELOAD
4661 rtx's for those pseudo regs. */
4662 else
4663 {
4664 CLEAR_REG_SET (&reg_has_output_reload);
4665 CLEAR_HARD_REG_SET (reg_is_output_reload);
4666
4667 find_reloads (insn, 1, spill_indirect_levels, live_known,
4668 spill_reg_order);
4669 }
4670
4671 if (n_reloads > 0)
4672 {
4673 rtx_insn *next = NEXT_INSN (insn);
4674
4675 /* ??? PREV can get deleted by reload inheritance.
4676 Work around this by emitting a marker note. */
4677 prev = PREV_INSN (insn);
4678 reorder_insns_nobb (marker, marker, prev);
4679
4680 /* Now compute which reload regs to reload them into. Perhaps
4681 reusing reload regs from previous insns, or else output
4682 load insns to reload them. Maybe output store insns too.
4683 Record the choices of reload reg in reload_reg_rtx. */
4684 choose_reload_regs (chain);
4685
4686 /* Generate the insns to reload operands into or out of
4687 their reload regs. */
4688 emit_reload_insns (chain);
4689
4690 /* Substitute the chosen reload regs from reload_reg_rtx
4691 into the insn's body (or perhaps into the bodies of other
4692 load and store insn that we just made for reloading
4693 and that we moved the structure into). */
4694 subst_reloads (insn);
4695
4696 prev = PREV_INSN (marker);
4697 unlink_insn_chain (marker, marker);
4698
4699 /* Adjust the exception region notes for loads and stores. */
4700 if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
4701 fixup_eh_region_note (insn, prev, next);
4702
4703 /* Adjust the location of REG_ARGS_SIZE. */
4704 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4705 if (p)
4706 {
4707 remove_note (insn, p);
4708 fixup_args_size_notes (prev, PREV_INSN (next),
4709 INTVAL (XEXP (p, 0)));
4710 }
4711
4712 /* If this was an ASM, make sure that all the reload insns
4713 we have generated are valid. If not, give an error
4714 and delete them. */
4715 if (asm_noperands (PATTERN (insn)) >= 0)
4716 for (rtx_insn *p = NEXT_INSN (prev);
4717 p != next;
4718 p = NEXT_INSN (p))
4719 if (p != insn && INSN_P (p)
4720 && GET_CODE (PATTERN (p)) != USE
4721 && (recog_memoized (p) < 0
4722 || (extract_insn (p),
4723 !(constrain_operands (1,
4724 get_enabled_alternatives (p))))))
4725 {
4726 error_for_asm (insn,
4727 "%<asm%> operand requires "
4728 "impossible reload");
4729 delete_insn (p);
4730 }
4731 }
4732
4733 if (num_eliminable && chain->need_elim)
4734 update_eliminable_offsets ();
4735
4736 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4737 is no longer validly lying around to save a future reload.
4738 Note that this does not detect pseudos that were reloaded
4739 for this insn in order to be stored in
4740 (obeying register constraints). That is correct; such reload
4741 registers ARE still valid. */
4742 forget_marked_reloads (&regs_to_forget);
4743 CLEAR_REG_SET (&regs_to_forget);
4744
4745 /* There may have been CLOBBER insns placed after INSN. So scan
4746 between INSN and NEXT and use them to forget old reloads. */
4747 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4748 if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
4749 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
4750
4751 #ifdef AUTO_INC_DEC
4752 /* Likewise for regs altered by auto-increment in this insn.
4753 REG_INC notes have been changed by reloading:
4754 find_reloads_address_1 records substitutions for them,
4755 which have been performed by subst_reloads above. */
4756 for (i = n_reloads - 1; i >= 0; i--)
4757 {
4758 rtx in_reg = rld[i].in_reg;
4759 if (in_reg)
4760 {
4761 enum rtx_code code = GET_CODE (in_reg);
4762 /* PRE_INC / PRE_DEC will have the reload register ending up
4763 with the same value as the stack slot, but that doesn't
4764 hold true for POST_INC / POST_DEC. Either we have to
4765 convert the memory access to a true POST_INC / POST_DEC,
4766 or we can't use the reload register for inheritance. */
4767 if ((code == POST_INC || code == POST_DEC)
4768 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4769 REGNO (rld[i].reg_rtx))
4770 /* Make sure it is the inc/dec pseudo, and not
4771 some other (e.g. output operand) pseudo. */
4772 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4773 == REGNO (XEXP (in_reg, 0))))
4774
4775 {
4776 rtx reload_reg = rld[i].reg_rtx;
4777 machine_mode mode = GET_MODE (reload_reg);
4778 int n = 0;
4779 rtx_insn *p;
4780
4781 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4782 {
4783 /* We really want to ignore REG_INC notes here, so
4784 use PATTERN (p) as argument to reg_set_p . */
4785 if (reg_set_p (reload_reg, PATTERN (p)))
4786 break;
4787 n = count_occurrences (PATTERN (p), reload_reg, 0);
4788 if (! n)
4789 continue;
4790 if (n == 1)
4791 {
4792 rtx replace_reg
4793 = gen_rtx_fmt_e (code, mode, reload_reg);
4794
4795 validate_replace_rtx_group (reload_reg,
4796 replace_reg, p);
4797 n = verify_changes (0);
4798
4799 /* We must also verify that the constraints
4800 are met after the replacement. Make sure
4801 extract_insn is only called for an insn
4802 where the replacements were found to be
4803 valid so far. */
4804 if (n)
4805 {
4806 extract_insn (p);
4807 n = constrain_operands (1,
4808 get_enabled_alternatives (p));
4809 }
4810
4811 /* If the constraints were not met, then
4812 undo the replacement, else confirm it. */
4813 if (!n)
4814 cancel_changes (0);
4815 else
4816 confirm_change_group ();
4817 }
4818 break;
4819 }
4820 if (n == 1)
4821 {
4822 add_reg_note (p, REG_INC, reload_reg);
4823 /* Mark this as having an output reload so that the
4824 REG_INC processing code below won't invalidate
4825 the reload for inheritance. */
4826 SET_HARD_REG_BIT (reg_is_output_reload,
4827 REGNO (reload_reg));
4828 SET_REGNO_REG_SET (&reg_has_output_reload,
4829 REGNO (XEXP (in_reg, 0)));
4830 }
4831 else
4832 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4833 NULL);
4834 }
4835 else if ((code == PRE_INC || code == PRE_DEC)
4836 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4837 REGNO (rld[i].reg_rtx))
4838 /* Make sure it is the inc/dec pseudo, and not
4839 some other (e.g. output operand) pseudo. */
4840 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4841 == REGNO (XEXP (in_reg, 0))))
4842 {
4843 SET_HARD_REG_BIT (reg_is_output_reload,
4844 REGNO (rld[i].reg_rtx));
4845 SET_REGNO_REG_SET (&reg_has_output_reload,
4846 REGNO (XEXP (in_reg, 0)));
4847 }
4848 else if (code == PRE_INC || code == PRE_DEC
4849 || code == POST_INC || code == POST_DEC)
4850 {
4851 int in_regno = REGNO (XEXP (in_reg, 0));
4852
4853 if (reg_last_reload_reg[in_regno] != NULL_RTX)
4854 {
4855 int in_hard_regno;
4856 bool forget_p = true;
4857
4858 in_hard_regno = REGNO (reg_last_reload_reg[in_regno]);
4859 if (TEST_HARD_REG_BIT (reg_reloaded_valid,
4860 in_hard_regno))
4861 {
4862 for (rtx_insn *x = (old_prev ?
4863 NEXT_INSN (old_prev) : insn);
4864 x != old_next;
4865 x = NEXT_INSN (x))
4866 if (x == reg_reloaded_insn[in_hard_regno])
4867 {
4868 forget_p = false;
4869 break;
4870 }
4871 }
4872 /* If for some reasons, we didn't set up
4873 reg_last_reload_reg in this insn,
4874 invalidate inheritance from previous
4875 insns for the incremented/decremented
4876 register. Such registers will be not in
4877 reg_has_output_reload. Invalidate it
4878 also if the corresponding element in
4879 reg_reloaded_insn is also
4880 invalidated. */
4881 if (forget_p)
4882 forget_old_reloads_1 (XEXP (in_reg, 0),
4883 NULL_RTX, NULL);
4884 }
4885 }
4886 }
4887 }
4888 /* If a pseudo that got a hard register is auto-incremented,
4889 we must purge records of copying it into pseudos without
4890 hard registers. */
4891 for (rtx x = REG_NOTES (insn); x; x = XEXP (x, 1))
4892 if (REG_NOTE_KIND (x) == REG_INC)
4893 {
4894 /* See if this pseudo reg was reloaded in this insn.
4895 If so, its last-reload info is still valid
4896 because it is based on this insn's reload. */
4897 for (i = 0; i < n_reloads; i++)
4898 if (rld[i].out == XEXP (x, 0))
4899 break;
4900
4901 if (i == n_reloads)
4902 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4903 }
4904 #endif
4905 }
4906 /* A reload reg's contents are unknown after a label. */
4907 if (LABEL_P (insn))
4908 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4909
4910 /* Don't assume a reload reg is still good after a call insn
4911 if it is a call-used reg, or if it contains a value that will
4912 be partially clobbered by the call. */
4913 else if (CALL_P (insn))
4914 {
4915 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4916 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
4917
4918 /* If this is a call to a setjmp-type function, we must not
4919 reuse any reload reg contents across the call; that will
4920 just be clobbered by other uses of the register in later
4921 code, before the longjmp. */
4922 if (find_reg_note (insn, REG_SETJMP, NULL_RTX))
4923 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4924 }
4925 }
4926
4927 /* Clean up. */
4928 free (reg_last_reload_reg);
4929 CLEAR_REG_SET (&reg_has_output_reload);
4930 }
4931
4932 /* Discard all record of any value reloaded from X,
4933 or reloaded in X from someplace else;
4934 unless X is an output reload reg of the current insn.
4935
4936 X may be a hard reg (the reload reg)
4937 or it may be a pseudo reg that was reloaded from.
4938
4939 When DATA is non-NULL just mark the registers in regset
4940 to be forgotten later. */
4941
4942 static void
4943 forget_old_reloads_1 (rtx x, const_rtx ignored ATTRIBUTE_UNUSED,
4944 void *data)
4945 {
4946 unsigned int regno;
4947 unsigned int nr;
4948 regset regs = (regset) data;
4949
4950 /* note_stores does give us subregs of hard regs,
4951 subreg_regno_offset requires a hard reg. */
4952 while (GET_CODE (x) == SUBREG)
4953 {
4954 /* We ignore the subreg offset when calculating the regno,
4955 because we are using the entire underlying hard register
4956 below. */
4957 x = SUBREG_REG (x);
4958 }
4959
4960 if (!REG_P (x))
4961 return;
4962
4963 regno = REGNO (x);
4964
4965 if (regno >= FIRST_PSEUDO_REGISTER)
4966 nr = 1;
4967 else
4968 {
4969 unsigned int i;
4970
4971 nr = hard_regno_nregs[regno][GET_MODE (x)];
4972 /* Storing into a spilled-reg invalidates its contents.
4973 This can happen if a block-local pseudo is allocated to that reg
4974 and it wasn't spilled because this block's total need is 0.
4975 Then some insn might have an optional reload and use this reg. */
4976 if (!regs)
4977 for (i = 0; i < nr; i++)
4978 /* But don't do this if the reg actually serves as an output
4979 reload reg in the current instruction. */
4980 if (n_reloads == 0
4981 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4982 {
4983 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4984 spill_reg_store[regno + i] = 0;
4985 }
4986 }
4987
4988 if (regs)
4989 while (nr-- > 0)
4990 SET_REGNO_REG_SET (regs, regno + nr);
4991 else
4992 {
4993 /* Since value of X has changed,
4994 forget any value previously copied from it. */
4995
4996 while (nr-- > 0)
4997 /* But don't forget a copy if this is the output reload
4998 that establishes the copy's validity. */
4999 if (n_reloads == 0
5000 || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
5001 reg_last_reload_reg[regno + nr] = 0;
5002 }
5003 }
5004
5005 /* Forget the reloads marked in regset by previous function. */
5006 static void
5007 forget_marked_reloads (regset regs)
5008 {
5009 unsigned int reg;
5010 reg_set_iterator rsi;
5011 EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
5012 {
5013 if (reg < FIRST_PSEUDO_REGISTER
5014 /* But don't do this if the reg actually serves as an output
5015 reload reg in the current instruction. */
5016 && (n_reloads == 0
5017 || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
5018 {
5019 CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
5020 spill_reg_store[reg] = 0;
5021 }
5022 if (n_reloads == 0
5023 || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
5024 reg_last_reload_reg[reg] = 0;
5025 }
5026 }
5027 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is unavailable for all reloads.  */
static HARD_REG_SET reload_reg_unavailable;
/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload reg.  We just mark the first
   register in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are used in any way, either as explicit use or
   by being allocated to a pseudo during any point of the current insn.  */
static HARD_REG_SET reg_used_in_insn;
5066
5067 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
5068 TYPE. MODE is used to indicate how many consecutive regs are
5069 actually used. */
5070
5071 static void
5072 mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
5073 machine_mode mode)
5074 {
5075 switch (type)
5076 {
5077 case RELOAD_OTHER:
5078 add_to_hard_reg_set (&reload_reg_used, mode, regno);
5079 break;
5080
5081 case RELOAD_FOR_INPUT_ADDRESS:
5082 add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
5083 break;
5084
5085 case RELOAD_FOR_INPADDR_ADDRESS:
5086 add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
5087 break;
5088
5089 case RELOAD_FOR_OUTPUT_ADDRESS:
5090 add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
5091 break;
5092
5093 case RELOAD_FOR_OUTADDR_ADDRESS:
5094 add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
5095 break;
5096
5097 case RELOAD_FOR_OPERAND_ADDRESS:
5098 add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
5099 break;
5100
5101 case RELOAD_FOR_OPADDR_ADDR:
5102 add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
5103 break;
5104
5105 case RELOAD_FOR_OTHER_ADDRESS:
5106 add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
5107 break;
5108
5109 case RELOAD_FOR_INPUT:
5110 add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
5111 break;
5112
5113 case RELOAD_FOR_OUTPUT:
5114 add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
5115 break;
5116
5117 case RELOAD_FOR_INSN:
5118 add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
5119 break;
5120 }
5121
5122 add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
5123 }
5124
/* Similarly, but show REGNO is no longer in use for a reload.
   OPNUM, TYPE and MODE mirror the arguments of mark_reload_reg_in_use:
   they identify which per-type usage set the register was recorded in
   and how many consecutive hard registers were claimed.  */

static void
clear_reload_reg_in_use (unsigned int regno, int opnum,
			 enum reload_type type, machine_mode mode)
{
  unsigned int nregs = hard_regno_nregs[regno][mode];
  unsigned int start_regno, end_regno, r;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      gcc_unreachable ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (rld[i].when_needed == type
	      && (check_any || rld[i].opnum == opnum)
	      && rld[i].reg_rtx)
	    {
	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
	      unsigned int conflict_end
		= end_hard_regno (rld[i].mode, conflict_start);

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }

  /* Free only the registers in the possibly-narrowed interval; anything
     excluded above is still claimed by a surviving reload.  */
  for (r = start_regno; r < end_regno; r++)
    CLEAR_HARD_REG_BIT (*used_in_set, r);
}
5231
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  Returns 0 when the register is already
   recorded (by mark_reload_reg_in_use) in a usage set that conflicts
   with the requested reload class.  */

static int
reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
	 for this operand or used as an input in an earlier
	 one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  Note
	 that multiple output operands are emitted in reverse order, so
	 the conflicting ones are those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
	 for this operand or used as an output in this or a
	 later operand.  Note that multiple output operands are
	 emitted in reverse order, so the conflicting ones are
	 those with lower indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with higher indices.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    default:
      gcc_unreachable ();
    }
}
5392
/* Return 1 if the value in reload reg REGNO, as used by the reload with
   the number RELOADNUM, is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
{
  int opnum = rld[reloadnum].opnum;
  enum reload_type type = rld[reloadnum].when_needed;
  int i;

  /* See if there is a reload with the same type for this operand, using
     the same register. This case is not handled by the code below.  */
  for (i = reloadnum + 1; i < n_reloads; i++)
    {
      rtx reg;
      int nregs;

      if (rld[i].opnum != opnum || rld[i].when_needed != type)
	continue;
      reg = rld[i].reg_rtx;
      if (reg == NULL_RTX)
	continue;
      nregs = hard_regno_nregs[REGNO (reg)][GET_MODE (reg)];
      if (regno >= REGNO (reg) && regno < REGNO (reg) + nregs)
	return 0;
    }

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_INPADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_INPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_INPADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Setting opnum past the last operand makes the shared loop
	 below scan every output-address set.  */
      opnum = reload_n_operands;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.
	 Note that multiple output operands are emitted in reverse order,
	 so the conflicting ones are those with lower indices.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
	  return 0;

      /* Reload register of reload with type RELOAD_FOR_OUTADDR_ADDRESS
	 could be killed if the register is also used by reload with type
	 RELOAD_FOR_OUTPUT_ADDRESS, so check it.  */
      if (type == RELOAD_FOR_OUTADDR_ADDRESS
	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
	return 0;

      return 1;

    default:
      gcc_unreachable ();
    }
}
5559
5560 /* Like reload_reg_reaches_end_p, but check that the condition holds for
5561 every register in REG. */
5562
5563 static bool
5564 reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5565 {
5566 unsigned int i;
5567
5568 for (i = REGNO (reg); i < END_REGNO (reg); i++)
5569 if (!reload_reg_reaches_end_p (i, reloadnum))
5570 return false;
5571 return true;
5572 }
5573 \f
5574
5575 /* Returns whether R1 and R2 are uniquely chained: the value of one
5576 is used by the other, and that value is not used by any other
5577 reload for this insn. This is used to partially undo the decision
5578 made in find_reloads when in the case of multiple
5579 RELOAD_FOR_OPERAND_ADDRESS reloads it converts all
5580 RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5581 reloads. This code tries to avoid the conflict created by that
5582 change. It might be cleaner to explicitly keep track of which
5583 RELOAD_FOR_OPADDR_ADDR reload is associated with which
5584 RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5585 this after the fact. */
5586 static bool
5587 reloads_unique_chain_p (int r1, int r2)
5588 {
5589 int i;
5590
5591 /* We only check input reloads. */
5592 if (! rld[r1].in || ! rld[r2].in)
5593 return false;
5594
5595 /* Avoid anything with output reloads. */
5596 if (rld[r1].out || rld[r2].out)
5597 return false;
5598
5599 /* "chained" means one reload is a component of the other reload,
5600 not the same as the other reload. */
5601 if (rld[r1].opnum != rld[r2].opnum
5602 || rtx_equal_p (rld[r1].in, rld[r2].in)
5603 || rld[r1].optional || rld[r2].optional
5604 || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5605 || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5606 return false;
5607
5608 /* The following loop assumes that r1 is the reload that feeds r2. */
5609 if (r1 > r2)
5610 {
5611 int tmp = r2;
5612 r2 = r1;
5613 r1 = tmp;
5614 }
5615
5616 for (i = 0; i < n_reloads; i ++)
5617 /* Look for input reloads that aren't our two */
5618 if (i != r1 && i != r2 && rld[i].in)
5619 {
5620 /* If our reload is mentioned at all, it isn't a simple chain. */
5621 if (reg_mentioned_p (rld[r1].in, rld[i].in))
5622 return false;
5623 }
5624 return true;
5625 }
5626
5627 /* The recursive function change all occurrences of WHAT in *WHERE
5628 to REPL. */
5629 static void
5630 substitute (rtx *where, const_rtx what, rtx repl)
5631 {
5632 const char *fmt;
5633 int i;
5634 enum rtx_code code;
5635
5636 if (*where == 0)
5637 return;
5638
5639 if (*where == what || rtx_equal_p (*where, what))
5640 {
5641 /* Record the location of the changed rtx. */
5642 substitute_stack.safe_push (where);
5643 *where = repl;
5644 return;
5645 }
5646
5647 code = GET_CODE (*where);
5648 fmt = GET_RTX_FORMAT (code);
5649 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5650 {
5651 if (fmt[i] == 'E')
5652 {
5653 int j;
5654
5655 for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5656 substitute (&XVECEXP (*where, i, j), what, repl);
5657 }
5658 else if (fmt[i] == 'e')
5659 substitute (&XEXP (*where, i), what, repl);
5660 }
5661 }
5662
/* The function returns TRUE if chain of reload R1 and R2 (in any
   order) can be evaluated without usage of intermediate register for
   the reload containing another reload.  It is important to see
   gen_reload to understand what the function is trying to do.  As an
   example, let us have reload chain

      r2: const
      r1: <something> + const

   and reload R2 got reload reg HR.  The function returns true if
   there is a correct insn HR = HR + <something>.  Otherwise,
   gen_reload will use intermediate register (and this is the reload
   reg for R1) to reload <something>.

   We need this function to find a conflict for chain reloads.  In our
   example, if HR = HR + <something> is incorrect insn, then we cannot
   use HR as a reload register for R2.  If we do use it then we get a
   wrong code:

      HR = const
      HR = <something>
      HR = HR + HR

   */
static bool
gen_reload_chain_without_interm_reg_p (int r1, int r2)
{
  /* Assume other cases in gen_reload are not possible for
     chain reloads or do need an intermediate hard registers.  */
  bool result = true;
  int regno, n, code;
  rtx out, in;
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();

  /* Make r2 a component of r1.  */
  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
    {
      n = r1;
      r1 = r2;
      r2 = n;
    }
  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
  /* Use whichever of the two reloads has a hard register assigned.  */
  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
  gcc_assert (regno >= 0);
  out = gen_rtx_REG (rld[r1].mode, regno);
  in = rld[r1].in;
  /* Replace the inner reload's value inside IN by the shared hard reg,
     recording each changed location on substitute_stack.  */
  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  strip_paradoxical_subreg (&in, &out);

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* Tentatively emit OUT = IN and ask recog whether the target
	 accepts it as a single insn; the insn is deleted afterwards.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);
      result = false;

      if (code >= 0)
	{
	  extract_insn (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after
	     reload has completed.  */
	  result = constrain_operands (1, get_enabled_alternatives (insn));
	}

      delete_insns_since (last);
    }

  /* Restore the original value at each changed address within R1.  */
  while (!substitute_stack.is_empty ())
    {
      rtx *where = substitute_stack.pop ();
      *where = rld[r2].in;
    }

  return result;
}
5750
5751 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5752 Return 0 otherwise.
5753
5754 This function uses the same algorithm as reload_reg_free_p above. */
5755
5756 static int
5757 reloads_conflict (int r1, int r2)
5758 {
5759 enum reload_type r1_type = rld[r1].when_needed;
5760 enum reload_type r2_type = rld[r2].when_needed;
5761 int r1_opnum = rld[r1].opnum;
5762 int r2_opnum = rld[r2].opnum;
5763
5764 /* RELOAD_OTHER conflicts with everything. */
5765 if (r2_type == RELOAD_OTHER)
5766 return 1;
5767
5768 /* Otherwise, check conflicts differently for each type. */
5769
5770 switch (r1_type)
5771 {
5772 case RELOAD_FOR_INPUT:
5773 return (r2_type == RELOAD_FOR_INSN
5774 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5775 || r2_type == RELOAD_FOR_OPADDR_ADDR
5776 || r2_type == RELOAD_FOR_INPUT
5777 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5778 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5779 && r2_opnum > r1_opnum));
5780
5781 case RELOAD_FOR_INPUT_ADDRESS:
5782 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5783 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5784
5785 case RELOAD_FOR_INPADDR_ADDRESS:
5786 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5787 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5788
5789 case RELOAD_FOR_OUTPUT_ADDRESS:
5790 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5791 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5792
5793 case RELOAD_FOR_OUTADDR_ADDRESS:
5794 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5795 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum <= r1_opnum));
5796
5797 case RELOAD_FOR_OPERAND_ADDRESS:
5798 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5799 || (r2_type == RELOAD_FOR_OPERAND_ADDRESS
5800 && (!reloads_unique_chain_p (r1, r2)
5801 || !gen_reload_chain_without_interm_reg_p (r1, r2))));
5802
5803 case RELOAD_FOR_OPADDR_ADDR:
5804 return (r2_type == RELOAD_FOR_INPUT
5805 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5806
5807 case RELOAD_FOR_OUTPUT:
5808 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5809 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5810 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5811 && r2_opnum >= r1_opnum));
5812
5813 case RELOAD_FOR_INSN:
5814 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5815 || r2_type == RELOAD_FOR_INSN
5816 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5817
5818 case RELOAD_FOR_OTHER_ADDRESS:
5819 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5820
5821 case RELOAD_OTHER:
5822 return 1;
5823
5824 default:
5825 gcc_unreachable ();
5826 }
5827 }
5828 \f
/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
static char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
static rtx_insn *reload_inheritance_insn[MAX_RELOADS];

/* If nonzero, this is a place to get the value of the reload,
   rather than using reload_in.  */
static rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
static int reload_spill_index[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the input mode.  */
static rtx reload_reg_rtx_for_input[MAX_RELOADS];

/* Index X is the value of rld[X].reg_rtx, adjusted for the output mode.  */
static rtx reload_reg_rtx_for_output[MAX_RELOADS];
5850
/* Subroutine of free_for_value_p, used to check a single register.
   START_REGNO is the starting regno of the full reload register
   (possibly comprising multiple hard registers) that we are considering;
   REGNO is the individual hard register checked here.  OPNUM, TYPE,
   VALUE, OUT, RELOADNUM and IGNORE_ADDRESS_RELOADS have the same
   meaning as for free_for_value_p.  Return 1 if REGNO is usable,
   0 if it conflicts with some existing reload.  */

static int
reload_reg_free_for_value_p (int start_regno, int regno, int opnum,
			     enum reload_type type, rtx value, rtx out,
			     int reloadnum, int ignore_address_reloads)
{
  int time1;
  /* Set if we see an input reload that must not share its reload register
     with any new earlyclobber, but might otherwise share the reload
     register with an output or input-output reload.  */
  int check_earlyclobber = 0;
  int i;
  int copy = 0;

  if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
    return 0;

  /* OUT == const0_rtx is a special request: test copying from REGNO
     rather than using REGNO as a reload register (see free_for_value_p).  */
  if (out == const0_rtx)
    {
      copy = 1;
      out = NULL_RTX;
    }

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads.  */
      time1 = copy ? 0 : 1;
      break;
    case RELOAD_OTHER:
      time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
      break;
      /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
	 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
	 respectively, to the time values for these, we get distinct time
	 values.  To get distinct time values for each operand, we have to
	 multiply opnum by at least three.  We round that up to four because
	 multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 3;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till the instruction
	 executes (inclusive).  */
      time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
      break;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
      break;
    default:
      time1 = MAX_RECOG_OPERANDS * 5 + 5;
    }

  /* Compare against every other reload whose reload register overlaps
     REGNO; reject REGNO if any such reload's lifetime overlaps ours.  */
  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = rld[i].reg_rtx;
      if (reg && REG_P (reg)
	  && ((unsigned) regno - true_regnum (reg)
	      <= hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] - (unsigned) 1)
	  && i != reloadnum)
	{
	  rtx other_input = rld[i].in;

	  /* If the other reload loads the same input value, that
	     will not cause a conflict only if it's loading it into
	     the same register.  */
	  if (true_regnum (reg) != start_regno)
	    other_input = NULL_RTX;
	  if (! other_input || ! rtx_equal_p (other_input, value)
	      || rld[i].out || out)
	    {
	      int time2;
	      switch (rld[i].when_needed)
		{
		case RELOAD_FOR_OTHER_ADDRESS:
		  time2 = 0;
		  break;
		case RELOAD_FOR_INPADDR_ADDRESS:
		  /* find_reloads makes sure that a
		     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
		     by at most one - the first -
		     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
		     address reload is inherited, the address address reload
		     goes away, so we can ignore this conflict.  */
		  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
			 Then the address address is still needed to store
			 back the new address.  */
		      && ! rld[reloadnum].out)
		    continue;
		  /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
		     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
		     reloads go away.  */
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      /* Unless we are reloading an auto_inc expression.  */
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 2;
		  break;
		case RELOAD_FOR_INPUT_ADDRESS:
		  if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = rld[i].opnum * 4 + 3;
		  break;
		case RELOAD_FOR_INPUT:
		  time2 = rld[i].opnum * 4 + 4;
		  check_earlyclobber = 1;
		  break;
		  /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERAND - 1) * 4 + 4
		     == MAX_RECOG_OPERAND * 4  */
		case RELOAD_FOR_OPADDR_ADDR:
		  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 1;
		  break;
		case RELOAD_FOR_OPERAND_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 2;
		  check_earlyclobber = 1;
		  break;
		case RELOAD_FOR_INSN:
		  time2 = MAX_RECOG_OPERANDS * 4 + 3;
		  break;
		case RELOAD_FOR_OUTPUT:
		  /* All RELOAD_FOR_OUTPUT reloads become live just after the
		     instruction is executed.  */
		  time2 = MAX_RECOG_OPERANDS * 4 + 4;
		  break;
		  /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
		     the RELOAD_FOR_OUTPUT reloads, so assign it the same time
		     value.  */
		case RELOAD_FOR_OUTADDR_ADDRESS:
		  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
		      && ignore_address_reloads
		      && ! rld[reloadnum].out)
		    continue;
		  time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
		  break;
		case RELOAD_FOR_OUTPUT_ADDRESS:
		  time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
		  break;
		case RELOAD_OTHER:
		  /* If there is no conflict in the input part, handle this
		     like an output reload.  */
		  if (! rld[i].in || rtx_equal_p (other_input, value))
		    {
		      time2 = MAX_RECOG_OPERANDS * 4 + 4;
		      /* Earlyclobbered outputs must conflict with inputs.  */
		      if (earlyclobber_operand_p (rld[i].out))
			time2 = MAX_RECOG_OPERANDS * 4 + 3;

		      break;
		    }
		  time2 = 1;
		  /* RELOAD_OTHER might be live beyond instruction execution,
		     but this is not obvious when we set time2 = 1.  So check
		     here if there might be a problem with the new reload
		     clobbering the register used by the RELOAD_OTHER.  */
		  if (out)
		    return 0;
		  break;
		default:
		  return 0;
		}
	      if ((time1 >= time2
		   && (! rld[i].in || rld[i].out
		       || ! rtx_equal_p (other_input, value)))
		  || (out && rld[reloadnum].out_reg
		      && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
		return 0;
	    }
	}
    }

  /* Earlyclobbered outputs must conflict with inputs.  */
  if (check_earlyclobber && out && earlyclobber_operand_p (out))
    return 0;

  return 1;
}
6070
6071 /* Return 1 if the value in reload reg REGNO, as used by a reload
6072 needed for the part of the insn specified by OPNUM and TYPE,
6073 may be used to load VALUE into it.
6074
6075 MODE is the mode in which the register is used, this is needed to
6076 determine how many hard regs to test.
6077
6078 Other read-only reloads with the same value do not conflict
6079 unless OUT is nonzero and these other reloads have to live while
6080 output reloads live.
6081 If OUT is CONST0_RTX, this is a special case: it means that the
6082 test should not be for using register REGNO as reload register, but
6083 for copying from register REGNO into the reload register.
6084
6085 RELOADNUM is the number of the reload we want to load this value for;
6086 a reload does not conflict with itself.
6087
6088 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
6089 reloads that load an address for the very reload we are considering.
6090
6091 The caller has to make sure that there is no conflict with the return
6092 register. */
6093
6094 static int
6095 free_for_value_p (int regno, machine_mode mode, int opnum,
6096 enum reload_type type, rtx value, rtx out, int reloadnum,
6097 int ignore_address_reloads)
6098 {
6099 int nregs = hard_regno_nregs[regno][mode];
6100 while (nregs-- > 0)
6101 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
6102 value, out, reloadnum,
6103 ignore_address_reloads))
6104 return 0;
6105 return 1;
6106 }
6107
6108 /* Return nonzero if the rtx X is invariant over the current function. */
6109 /* ??? Actually, the places where we use this expect exactly what is
6110 tested here, and not everything that is function invariant. In
6111 particular, the frame pointer and arg pointer are special cased;
6112 pic_offset_table_rtx is not, and we must not spill these things to
6113 memory. */
6114
6115 int
6116 function_invariant_p (const_rtx x)
6117 {
6118 if (CONSTANT_P (x))
6119 return 1;
6120 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
6121 return 1;
6122 if (GET_CODE (x) == PLUS
6123 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
6124 && GET_CODE (XEXP (x, 1)) == CONST_INT)
6125 return 1;
6126 return 0;
6127 }
6128
6129 /* Determine whether the reload reg X overlaps any rtx'es used for
6130 overriding inheritance. Return nonzero if so. */
6131
6132 static int
6133 conflicts_with_override (rtx x)
6134 {
6135 int i;
6136 for (i = 0; i < n_reloads; i++)
6137 if (reload_override_in[i]
6138 && reg_overlap_mentioned_p (x, reload_override_in[i]))
6139 return 1;
6140 return 0;
6141 }
6142 \f
6143 /* Give an error message saying we failed to find a reload for INSN,
6144 and clear out reload R. */
6145 static void
6146 failed_reload (rtx_insn *insn, int r)
6147 {
6148 if (asm_noperands (PATTERN (insn)) < 0)
6149 /* It's the compiler's fault. */
6150 fatal_insn ("could not find a spill register", insn);
6151
6152 /* It's the user's fault; the operand's mode and constraint
6153 don't match. Disable this reload so we don't crash in final. */
6154 error_for_asm (insn,
6155 "%<asm%> operand constraint incompatible with operand size");
6156 rld[r].in = 0;
6157 rld[r].out = 0;
6158 rld[r].reg_rtx = 0;
6159 rld[r].optional = 1;
6160 rld[r].secondary_p = 1;
6161 }
6162
6163 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
6164 for reload R. If it's valid, get an rtx for it. Return nonzero if
6165 successful. */
6166 static int
6167 set_reload_reg (int i, int r)
6168 {
6169 /* regno is 'set but not used' if HARD_REGNO_MODE_OK doesn't use its first
6170 parameter. */
6171 int regno ATTRIBUTE_UNUSED;
6172 rtx reg = spill_reg_rtx[i];
6173
6174 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
6175 spill_reg_rtx[i] = reg
6176 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
6177
6178 regno = true_regnum (reg);
6179
6180 /* Detect when the reload reg can't hold the reload mode.
6181 This used to be one `if', but Sequent compiler can't handle that. */
6182 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
6183 {
6184 machine_mode test_mode = VOIDmode;
6185 if (rld[r].in)
6186 test_mode = GET_MODE (rld[r].in);
6187 /* If rld[r].in has VOIDmode, it means we will load it
6188 in whatever mode the reload reg has: to wit, rld[r].mode.
6189 We have already tested that for validity. */
6190 /* Aside from that, we need to test that the expressions
6191 to reload from or into have modes which are valid for this
6192 reload register. Otherwise the reload insns would be invalid. */
6193 if (! (rld[r].in != 0 && test_mode != VOIDmode
6194 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
6195 if (! (rld[r].out != 0
6196 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
6197 {
6198 /* The reg is OK. */
6199 last_spill_reg = i;
6200
6201 /* Mark as in use for this insn the reload regs we use
6202 for this. */
6203 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
6204 rld[r].when_needed, rld[r].mode);
6205
6206 rld[r].reg_rtx = reg;
6207 reload_spill_index[r] = spill_regs[i];
6208 return 1;
6209 }
6210 }
6211 return 0;
6212 }
6213
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is nonzero if this is the last reload for the insn being
   processed.

   Set rld[R].reg_rtx to the register allocated.

   We return 1 if successful, or 0 if we couldn't find a spill reg and
   we didn't change anything.  */

static int
allocate_reload_reg (struct insn_chain *chain ATTRIBUTE_UNUSED, int r,
		     int last_reload)
{
  int i, pass, count;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = rld[r].nregs > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use three passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers which are not "bad", then
     finally any register.

     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 3; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      i = last_spill_reg;

      for (count = 0; count < n_spills; count++)
	{
	  int rclass = (int) rld[r].rclass;
	  int regnum;

	  /* Advance I with wrap-around over the spill_regs array.  */
	  i++;
	  if (i >= n_spills)
	    i -= n_spills;
	  regnum = spill_regs[i];

	  if ((reload_reg_free_p (regnum, rld[r].opnum,
				  rld[r].when_needed)
	       || (rld[r].in
		   /* We check reload_reg_used to make sure we
		      don't clobber the return register.  */
		   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
		   && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
					rld[r].when_needed, rld[r].in,
					rld[r].out, r, 1)))
	      && TEST_HARD_REG_BIT (reg_class_contents[rclass], regnum)
	      && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
	      /* Look first for regs to share, then for unshared.  But
		 don't share regs used for inherited reloads; they are
		 the ones we want to preserve.  */
	      && (pass
		  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
					 regnum)
		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
					      regnum))))
	    {
	      int nr = hard_regno_nregs[regnum][rld[r].mode];

	      /* During the second pass we want to avoid reload registers
		 which are "bad" for this reload.  */
	      if (pass == 1
		  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
		continue;

	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = rld[r].nregs;
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.  */
	      while (nr > 1)
		{
		  int regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, rld[r].opnum,
					      rld[r].when_needed)))
		    break;
		  nr--;
		}
	      /* NR reached 1 only if every register of the group passed.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on the current pass, omit later passes.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count >= n_spills)
    return 0;

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  return set_reload_reg (i, r);
}
6349 \f
/* Initialize all the tables needed to allocate reload registers.
   CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
   is the array we use to restore the reg_rtx field for every reload.  */

static void
choose_reload_regs_init (struct insn_chain *chain, rtx *save_reload_reg_rtx)
{
  int i;

  /* Restore each reload's reg_rtx to its saved value.  */
  for (i = 0; i < n_reloads; i++)
    rld[i].reg_rtx = save_reload_reg_rtx[i];

  /* Clear the per-insn inheritance bookkeeping.  */
  memset (reload_inherited, 0, MAX_RELOADS);
  memset (reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
  memset (reload_override_in, 0, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Compute the set of hard registers live around or set by this insn,
     including those used by pseudos that are live here.  */
  CLEAR_HARD_REG_SET (reg_used_in_insn);
  {
    HARD_REG_SET tmp;
    REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
    IOR_HARD_REG_SET (reg_used_in_insn, tmp);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
    compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
  }

  /* Clear the per-operand usage sets.  */
  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Registers not in this chain's used_spill_regs are unavailable.  */
  COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);

  CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

  for (i = 0; i < n_reloads; i++)
    /* If we have already decided to use a certain register,
       don't use it in another way.  */
    if (rld[i].reg_rtx)
      mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
			      rld[i].when_needed, rld[i].mode);
}
6405
#ifdef SECONDARY_MEMORY_NEEDED
/* If X is not a subreg, return it unmodified.  If it is a subreg,
   look up whether we made a replacement for the SUBREG_REG.  Return
   either the replacement or the SUBREG_REG.  */

static rtx
replaced_subreg (rtx x)
{
  return (GET_CODE (x) == SUBREG
	  ? find_replacement (&SUBREG_REG (x))
	  : x);
}
#endif
6419
6420 /* Compute the offset to pass to subreg_regno_offset, for a pseudo of
6421 mode OUTERMODE that is available in a hard reg of mode INNERMODE.
6422 SUBREG is non-NULL if the pseudo is a subreg whose reg is a pseudo,
6423 otherwise it is NULL. */
6424
6425 static int
6426 compute_reload_subreg_offset (machine_mode outermode,
6427 rtx subreg,
6428 machine_mode innermode)
6429 {
6430 int outer_offset;
6431 machine_mode middlemode;
6432
6433 if (!subreg)
6434 return subreg_lowpart_offset (outermode, innermode);
6435
6436 outer_offset = SUBREG_BYTE (subreg);
6437 middlemode = GET_MODE (SUBREG_REG (subreg));
6438
6439 /* If SUBREG is paradoxical then return the normal lowpart offset
6440 for OUTERMODE and INNERMODE. Our caller has already checked
6441 that OUTERMODE fits in INNERMODE. */
6442 if (outer_offset == 0
6443 && GET_MODE_SIZE (outermode) > GET_MODE_SIZE (middlemode))
6444 return subreg_lowpart_offset (outermode, innermode);
6445
6446 /* SUBREG is normal, but may not be lowpart; return OUTER_OFFSET
6447 plus the normal lowpart offset for MIDDLEMODE and INNERMODE. */
6448 return outer_offset + subreg_lowpart_offset (middlemode, innermode);
6449 }
6450
6451 /* Assign hard reg targets for the pseudo-registers we must reload
6452 into hard regs for this insn.
6453 Also output the instructions to copy them in and out of the hard regs.
6454
6455 For machines with register classes, we are responsible for
6456 finding a reload reg in the proper class. */
6457
6458 static void
6459 choose_reload_regs (struct insn_chain *chain)
6460 {
6461 rtx_insn *insn = chain->insn;
6462 int i, j;
6463 unsigned int max_group_size = 1;
6464 enum reg_class group_class = NO_REGS;
6465 int pass, win, inheritance;
6466
6467 rtx save_reload_reg_rtx[MAX_RELOADS];
6468
6469 /* In order to be certain of getting the registers we need,
6470 we must sort the reloads into order of increasing register class.
6471 Then our grabbing of reload registers will parallel the process
6472 that provided the reload registers.
6473
6474 Also note whether any of the reloads wants a consecutive group of regs.
6475 If so, record the maximum size of the group desired and what
6476 register class contains all the groups needed by this insn. */
6477
6478 for (j = 0; j < n_reloads; j++)
6479 {
6480 reload_order[j] = j;
6481 if (rld[j].reg_rtx != NULL_RTX)
6482 {
6483 gcc_assert (REG_P (rld[j].reg_rtx)
6484 && HARD_REGISTER_P (rld[j].reg_rtx));
6485 reload_spill_index[j] = REGNO (rld[j].reg_rtx);
6486 }
6487 else
6488 reload_spill_index[j] = -1;
6489
6490 if (rld[j].nregs > 1)
6491 {
6492 max_group_size = MAX (rld[j].nregs, max_group_size);
6493 group_class
6494 = reg_class_superunion[(int) rld[j].rclass][(int) group_class];
6495 }
6496
6497 save_reload_reg_rtx[j] = rld[j].reg_rtx;
6498 }
6499
6500 if (n_reloads > 1)
6501 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
6502
6503 /* If -O, try first with inheritance, then turning it off.
6504 If not -O, don't do inheritance.
6505 Using inheritance when not optimizing leads to paradoxes
6506 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
6507 because one side of the comparison might be inherited. */
6508 win = 0;
6509 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
6510 {
6511 choose_reload_regs_init (chain, save_reload_reg_rtx);
6512
6513 /* Process the reloads in order of preference just found.
6514 Beyond this point, subregs can be found in reload_reg_rtx.
6515
6516 This used to look for an existing reloaded home for all of the
6517 reloads, and only then perform any new reloads. But that could lose
6518 if the reloads were done out of reg-class order because a later
6519 reload with a looser constraint might have an old home in a register
6520 needed by an earlier reload with a tighter constraint.
6521
6522 To solve this, we make two passes over the reloads, in the order
6523 described above. In the first pass we try to inherit a reload
6524 from a previous insn. If there is a later reload that needs a
6525 class that is a proper subset of the class being processed, we must
6526 also allocate a spill register during the first pass.
6527
6528 Then make a second pass over the reloads to allocate any reloads
6529 that haven't been given registers yet. */
6530
6531 for (j = 0; j < n_reloads; j++)
6532 {
6533 int r = reload_order[j];
6534 rtx search_equiv = NULL_RTX;
6535
6536 /* Ignore reloads that got marked inoperative. */
6537 if (rld[r].out == 0 && rld[r].in == 0
6538 && ! rld[r].secondary_p)
6539 continue;
6540
6541 /* If find_reloads chose to use reload_in or reload_out as a reload
6542 register, we don't need to chose one. Otherwise, try even if it
6543 found one since we might save an insn if we find the value lying
6544 around.
6545 Try also when reload_in is a pseudo without a hard reg. */
6546 if (rld[r].in != 0 && rld[r].reg_rtx != 0
6547 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
6548 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
6549 && !MEM_P (rld[r].in)
6550 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
6551 continue;
6552
6553 #if 0 /* No longer needed for correct operation.
6554 It might give better code, or might not; worth an experiment? */
6555 /* If this is an optional reload, we can't inherit from earlier insns
6556 until we are sure that any non-optional reloads have been allocated.
6557 The following code takes advantage of the fact that optional reloads
6558 are at the end of reload_order. */
6559 if (rld[r].optional != 0)
6560 for (i = 0; i < j; i++)
6561 if ((rld[reload_order[i]].out != 0
6562 || rld[reload_order[i]].in != 0
6563 || rld[reload_order[i]].secondary_p)
6564 && ! rld[reload_order[i]].optional
6565 && rld[reload_order[i]].reg_rtx == 0)
6566 allocate_reload_reg (chain, reload_order[i], 0);
6567 #endif
6568
6569 /* First see if this pseudo is already available as reloaded
6570 for a previous insn. We cannot try to inherit for reloads
6571 that are smaller than the maximum number of registers needed
6572 for groups unless the register we would allocate cannot be used
6573 for the groups.
6574
6575 We could check here to see if this is a secondary reload for
6576 an object that is already in a register of the desired class.
6577 This would avoid the need for the secondary reload register.
6578 But this is complex because we can't easily determine what
6579 objects might want to be loaded via this reload. So let a
6580 register be allocated here. In `emit_reload_insns' we suppress
6581 one of the loads in the case described above. */
6582
6583 if (inheritance)
6584 {
6585 int byte = 0;
6586 int regno = -1;
6587 machine_mode mode = VOIDmode;
6588 rtx subreg = NULL_RTX;
6589
6590 if (rld[r].in == 0)
6591 ;
6592 else if (REG_P (rld[r].in))
6593 {
6594 regno = REGNO (rld[r].in);
6595 mode = GET_MODE (rld[r].in);
6596 }
6597 else if (REG_P (rld[r].in_reg))
6598 {
6599 regno = REGNO (rld[r].in_reg);
6600 mode = GET_MODE (rld[r].in_reg);
6601 }
6602 else if (GET_CODE (rld[r].in_reg) == SUBREG
6603 && REG_P (SUBREG_REG (rld[r].in_reg)))
6604 {
6605 regno = REGNO (SUBREG_REG (rld[r].in_reg));
6606 if (regno < FIRST_PSEUDO_REGISTER)
6607 regno = subreg_regno (rld[r].in_reg);
6608 else
6609 {
6610 subreg = rld[r].in_reg;
6611 byte = SUBREG_BYTE (subreg);
6612 }
6613 mode = GET_MODE (rld[r].in_reg);
6614 }
6615 #ifdef AUTO_INC_DEC
6616 else if (GET_RTX_CLASS (GET_CODE (rld[r].in_reg)) == RTX_AUTOINC
6617 && REG_P (XEXP (rld[r].in_reg, 0)))
6618 {
6619 regno = REGNO (XEXP (rld[r].in_reg, 0));
6620 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
6621 rld[r].out = rld[r].in;
6622 }
6623 #endif
6624 #if 0
6625 /* This won't work, since REGNO can be a pseudo reg number.
6626 Also, it takes much more hair to keep track of all the things
6627 that can invalidate an inherited reload of part of a pseudoreg. */
6628 else if (GET_CODE (rld[r].in) == SUBREG
6629 && REG_P (SUBREG_REG (rld[r].in)))
6630 regno = subreg_regno (rld[r].in);
6631 #endif
6632
6633 if (regno >= 0
6634 && reg_last_reload_reg[regno] != 0
6635 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
6636 >= GET_MODE_SIZE (mode) + byte)
6637 #ifdef CANNOT_CHANGE_MODE_CLASS
6638 /* Verify that the register it's in can be used in
6639 mode MODE. */
6640 && !REG_CANNOT_CHANGE_MODE_P (REGNO (reg_last_reload_reg[regno]),
6641 GET_MODE (reg_last_reload_reg[regno]),
6642 mode)
6643 #endif
6644 )
6645 {
6646 enum reg_class rclass = rld[r].rclass, last_class;
6647 rtx last_reg = reg_last_reload_reg[regno];
6648
6649 i = REGNO (last_reg);
6650 byte = compute_reload_subreg_offset (mode,
6651 subreg,
6652 GET_MODE (last_reg));
6653 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
6654 last_class = REGNO_REG_CLASS (i);
6655
6656 if (reg_reloaded_contents[i] == regno
6657 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
6658 && HARD_REGNO_MODE_OK (i, rld[r].mode)
6659 && (TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], i)
6660 /* Even if we can't use this register as a reload
6661 register, we might use it for reload_override_in,
6662 if copying it to the desired class is cheap
6663 enough. */
6664 || ((register_move_cost (mode, last_class, rclass)
6665 < memory_move_cost (mode, rclass, true))
6666 && (secondary_reload_class (1, rclass, mode,
6667 last_reg)
6668 == NO_REGS)
6669 #ifdef SECONDARY_MEMORY_NEEDED
6670 && ! SECONDARY_MEMORY_NEEDED (last_class, rclass,
6671 mode)
6672 #endif
6673 ))
6674
6675 && (rld[r].nregs == max_group_size
6676 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
6677 i))
6678 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
6679 rld[r].when_needed, rld[r].in,
6680 const0_rtx, r, 1))
6681 {
6682 /* If a group is needed, verify that all the subsequent
6683 registers still have their values intact. */
6684 int nr = hard_regno_nregs[i][rld[r].mode];
6685 int k;
6686
6687 for (k = 1; k < nr; k++)
6688 if (reg_reloaded_contents[i + k] != regno
6689 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
6690 break;
6691
6692 if (k == nr)
6693 {
6694 int i1;
6695 int bad_for_class;
6696
6697 last_reg = (GET_MODE (last_reg) == mode
6698 ? last_reg : gen_rtx_REG (mode, i));
6699
6700 bad_for_class = 0;
6701 for (k = 0; k < nr; k++)
6702 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6703 i+k);
6704
6705 /* We found a register that contains the
6706 value we need. If this register is the
6707 same as an `earlyclobber' operand of the
6708 current insn, just mark it as a place to
6709 reload from since we can't use it as the
6710 reload register itself. */
6711
6712 for (i1 = 0; i1 < n_earlyclobbers; i1++)
6713 if (reg_overlap_mentioned_for_reload_p
6714 (reg_last_reload_reg[regno],
6715 reload_earlyclobbers[i1]))
6716 break;
6717
6718 if (i1 != n_earlyclobbers
6719 || ! (free_for_value_p (i, rld[r].mode,
6720 rld[r].opnum,
6721 rld[r].when_needed, rld[r].in,
6722 rld[r].out, r, 1))
6723 /* Don't use it if we'd clobber a pseudo reg. */
6724 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
6725 && rld[r].out
6726 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
6727 /* Don't clobber the frame pointer. */
6728 || (i == HARD_FRAME_POINTER_REGNUM
6729 && frame_pointer_needed
6730 && rld[r].out)
6731 /* Don't really use the inherited spill reg
6732 if we need it wider than we've got it. */
6733 || (GET_MODE_SIZE (rld[r].mode)
6734 > GET_MODE_SIZE (mode))
6735 || bad_for_class
6736
6737 /* If find_reloads chose reload_out as reload
6738 register, stay with it - that leaves the
6739 inherited register for subsequent reloads. */
6740 || (rld[r].out && rld[r].reg_rtx
6741 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
6742 {
6743 if (! rld[r].optional)
6744 {
6745 reload_override_in[r] = last_reg;
6746 reload_inheritance_insn[r]
6747 = reg_reloaded_insn[i];
6748 }
6749 }
6750 else
6751 {
6752 int k;
6753 /* We can use this as a reload reg. */
6754 /* Mark the register as in use for this part of
6755 the insn. */
6756 mark_reload_reg_in_use (i,
6757 rld[r].opnum,
6758 rld[r].when_needed,
6759 rld[r].mode);
6760 rld[r].reg_rtx = last_reg;
6761 reload_inherited[r] = 1;
6762 reload_inheritance_insn[r]
6763 = reg_reloaded_insn[i];
6764 reload_spill_index[r] = i;
6765 for (k = 0; k < nr; k++)
6766 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6767 i + k);
6768 }
6769 }
6770 }
6771 }
6772 }
6773
6774 /* Here's another way to see if the value is already lying around. */
6775 if (inheritance
6776 && rld[r].in != 0
6777 && ! reload_inherited[r]
6778 && rld[r].out == 0
6779 && (CONSTANT_P (rld[r].in)
6780 || GET_CODE (rld[r].in) == PLUS
6781 || REG_P (rld[r].in)
6782 || MEM_P (rld[r].in))
6783 && (rld[r].nregs == max_group_size
6784 || ! reg_classes_intersect_p (rld[r].rclass, group_class)))
6785 search_equiv = rld[r].in;
6786
6787 if (search_equiv)
6788 {
6789 rtx equiv
6790 = find_equiv_reg (search_equiv, insn, rld[r].rclass,
6791 -1, NULL, 0, rld[r].mode);
6792 int regno = 0;
6793
6794 if (equiv != 0)
6795 {
6796 if (REG_P (equiv))
6797 regno = REGNO (equiv);
6798 else
6799 {
6800 /* This must be a SUBREG of a hard register.
6801 Make a new REG since this might be used in an
6802 address and not all machines support SUBREGs
6803 there. */
6804 gcc_assert (GET_CODE (equiv) == SUBREG);
6805 regno = subreg_regno (equiv);
6806 equiv = gen_rtx_REG (rld[r].mode, regno);
6807 /* If we choose EQUIV as the reload register, but the
6808 loop below decides to cancel the inheritance, we'll
6809 end up reloading EQUIV in rld[r].mode, not the mode
6810 it had originally. That isn't safe when EQUIV isn't
6811 available as a spill register since its value might
6812 still be live at this point. */
6813 for (i = regno; i < regno + (int) rld[r].nregs; i++)
6814 if (TEST_HARD_REG_BIT (reload_reg_unavailable, i))
6815 equiv = 0;
6816 }
6817 }
6818
6819 /* If we found a spill reg, reject it unless it is free
6820 and of the desired class. */
6821 if (equiv != 0)
6822 {
6823 int regs_used = 0;
6824 int bad_for_class = 0;
6825 int max_regno = regno + rld[r].nregs;
6826
6827 for (i = regno; i < max_regno; i++)
6828 {
6829 regs_used |= TEST_HARD_REG_BIT (reload_reg_used_at_all,
6830 i);
6831 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].rclass],
6832 i);
6833 }
6834
6835 if ((regs_used
6836 && ! free_for_value_p (regno, rld[r].mode,
6837 rld[r].opnum, rld[r].when_needed,
6838 rld[r].in, rld[r].out, r, 1))
6839 || bad_for_class)
6840 equiv = 0;
6841 }
6842
6843 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
6844 equiv = 0;
6845
6846 /* We found a register that contains the value we need.
6847 If this register is the same as an `earlyclobber' operand
6848 of the current insn, just mark it as a place to reload from
6849 since we can't use it as the reload register itself. */
6850
6851 if (equiv != 0)
6852 for (i = 0; i < n_earlyclobbers; i++)
6853 if (reg_overlap_mentioned_for_reload_p (equiv,
6854 reload_earlyclobbers[i]))
6855 {
6856 if (! rld[r].optional)
6857 reload_override_in[r] = equiv;
6858 equiv = 0;
6859 break;
6860 }
6861
6862 /* If the equiv register we have found is explicitly clobbered
6863 in the current insn, it depends on the reload type if we
6864 can use it, use it for reload_override_in, or not at all.
6865 In particular, we then can't use EQUIV for a
6866 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6867
6868 if (equiv != 0)
6869 {
6870 if (regno_clobbered_p (regno, insn, rld[r].mode, 2))
6871 switch (rld[r].when_needed)
6872 {
6873 case RELOAD_FOR_OTHER_ADDRESS:
6874 case RELOAD_FOR_INPADDR_ADDRESS:
6875 case RELOAD_FOR_INPUT_ADDRESS:
6876 case RELOAD_FOR_OPADDR_ADDR:
6877 break;
6878 case RELOAD_OTHER:
6879 case RELOAD_FOR_INPUT:
6880 case RELOAD_FOR_OPERAND_ADDRESS:
6881 if (! rld[r].optional)
6882 reload_override_in[r] = equiv;
6883 /* Fall through. */
6884 default:
6885 equiv = 0;
6886 break;
6887 }
6888 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
6889 switch (rld[r].when_needed)
6890 {
6891 case RELOAD_FOR_OTHER_ADDRESS:
6892 case RELOAD_FOR_INPADDR_ADDRESS:
6893 case RELOAD_FOR_INPUT_ADDRESS:
6894 case RELOAD_FOR_OPADDR_ADDR:
6895 case RELOAD_FOR_OPERAND_ADDRESS:
6896 case RELOAD_FOR_INPUT:
6897 break;
6898 case RELOAD_OTHER:
6899 if (! rld[r].optional)
6900 reload_override_in[r] = equiv;
6901 /* Fall through. */
6902 default:
6903 equiv = 0;
6904 break;
6905 }
6906 }
6907
6908 /* If we found an equivalent reg, say no code need be generated
6909 to load it, and use it as our reload reg. */
6910 if (equiv != 0
6911 && (regno != HARD_FRAME_POINTER_REGNUM
6912 || !frame_pointer_needed))
6913 {
6914 int nr = hard_regno_nregs[regno][rld[r].mode];
6915 int k;
6916 rld[r].reg_rtx = equiv;
6917 reload_spill_index[r] = regno;
6918 reload_inherited[r] = 1;
6919
6920 /* If reg_reloaded_valid is not set for this register,
6921 there might be a stale spill_reg_store lying around.
6922 We must clear it, since otherwise emit_reload_insns
6923 might delete the store. */
6924 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6925 spill_reg_store[regno] = NULL;
6926 /* If any of the hard registers in EQUIV are spill
6927 registers, mark them as in use for this insn. */
6928 for (k = 0; k < nr; k++)
6929 {
6930 i = spill_reg_order[regno + k];
6931 if (i >= 0)
6932 {
6933 mark_reload_reg_in_use (regno, rld[r].opnum,
6934 rld[r].when_needed,
6935 rld[r].mode);
6936 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6937 regno + k);
6938 }
6939 }
6940 }
6941 }
6942
6943 /* If we found a register to use already, or if this is an optional
6944 reload, we are done. */
6945 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
6946 continue;
6947
6948 #if 0
6949 /* No longer needed for correct operation. Might or might
6950 not give better code on the average. Want to experiment? */
6951
6952 /* See if there is a later reload that has a class different from our
6953 class that intersects our class or that requires less register
6954 than our reload. If so, we must allocate a register to this
6955 reload now, since that reload might inherit a previous reload
6956 and take the only available register in our class. Don't do this
6957 for optional reloads since they will force all previous reloads
6958 to be allocated. Also don't do this for reloads that have been
6959 turned off. */
6960
6961 for (i = j + 1; i < n_reloads; i++)
6962 {
6963 int s = reload_order[i];
6964
6965 if ((rld[s].in == 0 && rld[s].out == 0
6966 && ! rld[s].secondary_p)
6967 || rld[s].optional)
6968 continue;
6969
6970 if ((rld[s].rclass != rld[r].rclass
6971 && reg_classes_intersect_p (rld[r].rclass,
6972 rld[s].rclass))
6973 || rld[s].nregs < rld[r].nregs)
6974 break;
6975 }
6976
6977 if (i == n_reloads)
6978 continue;
6979
6980 allocate_reload_reg (chain, r, j == n_reloads - 1);
6981 #endif
6982 }
6983
6984 /* Now allocate reload registers for anything non-optional that
6985 didn't get one yet. */
6986 for (j = 0; j < n_reloads; j++)
6987 {
6988 int r = reload_order[j];
6989
6990 /* Ignore reloads that got marked inoperative. */
6991 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
6992 continue;
6993
6994 /* Skip reloads that already have a register allocated or are
6995 optional. */
6996 if (rld[r].reg_rtx != 0 || rld[r].optional)
6997 continue;
6998
6999 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
7000 break;
7001 }
7002
7003 /* If that loop got all the way, we have won. */
7004 if (j == n_reloads)
7005 {
7006 win = 1;
7007 break;
7008 }
7009
7010 /* Loop around and try without any inheritance. */
7011 }
7012
7013 if (! win)
7014 {
7015 /* First undo everything done by the failed attempt
7016 to allocate with inheritance. */
7017 choose_reload_regs_init (chain, save_reload_reg_rtx);
7018
7019 /* Some sanity tests to verify that the reloads found in the first
7020 pass are identical to the ones we have now. */
7021 gcc_assert (chain->n_reloads == n_reloads);
7022
7023 for (i = 0; i < n_reloads; i++)
7024 {
7025 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
7026 continue;
7027 gcc_assert (chain->rld[i].when_needed == rld[i].when_needed);
7028 for (j = 0; j < n_spills; j++)
7029 if (spill_regs[j] == chain->rld[i].regno)
7030 if (! set_reload_reg (j, i))
7031 failed_reload (chain->insn, i);
7032 }
7033 }
7034
7035 /* If we thought we could inherit a reload, because it seemed that
7036 nothing else wanted the same reload register earlier in the insn,
7037 verify that assumption, now that all reloads have been assigned.
7038 Likewise for reloads where reload_override_in has been set. */
7039
7040 /* If doing expensive optimizations, do one preliminary pass that doesn't
7041 cancel any inheritance, but removes reloads that have been needed only
7042 for reloads that we know can be inherited. */
7043 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
7044 {
7045 for (j = 0; j < n_reloads; j++)
7046 {
7047 int r = reload_order[j];
7048 rtx check_reg;
7049 #ifdef SECONDARY_MEMORY_NEEDED
7050 rtx tem;
7051 #endif
7052 if (reload_inherited[r] && rld[r].reg_rtx)
7053 check_reg = rld[r].reg_rtx;
7054 else if (reload_override_in[r]
7055 && (REG_P (reload_override_in[r])
7056 || GET_CODE (reload_override_in[r]) == SUBREG))
7057 check_reg = reload_override_in[r];
7058 else
7059 continue;
7060 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
7061 rld[r].opnum, rld[r].when_needed, rld[r].in,
7062 (reload_inherited[r]
7063 ? rld[r].out : const0_rtx),
7064 r, 1))
7065 {
7066 if (pass)
7067 continue;
7068 reload_inherited[r] = 0;
7069 reload_override_in[r] = 0;
7070 }
7071 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
7072 reload_override_in, then we do not need its related
7073 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
7074 likewise for other reload types.
7075 We handle this by removing a reload when its only replacement
7076 is mentioned in reload_in of the reload we are going to inherit.
7077 A special case are auto_inc expressions; even if the input is
7078 inherited, we still need the address for the output. We can
7079 recognize them because they have RELOAD_OUT set to RELOAD_IN.
7080 If we succeeded removing some reload and we are doing a preliminary
7081 pass just to remove such reloads, make another pass, since the
7082 removal of one reload might allow us to inherit another one. */
7083 else if (rld[r].in
7084 && rld[r].out != rld[r].in
7085 && remove_address_replacements (rld[r].in))
7086 {
7087 if (pass)
7088 pass = 2;
7089 }
7090 #ifdef SECONDARY_MEMORY_NEEDED
7091 /* If we needed a memory location for the reload, we also have to
7092 remove its related reloads. */
7093 else if (rld[r].in
7094 && rld[r].out != rld[r].in
7095 && (tem = replaced_subreg (rld[r].in), REG_P (tem))
7096 && REGNO (tem) < FIRST_PSEUDO_REGISTER
7097 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem)),
7098 rld[r].rclass, rld[r].inmode)
7099 && remove_address_replacements
7100 (get_secondary_mem (tem, rld[r].inmode, rld[r].opnum,
7101 rld[r].when_needed)))
7102 {
7103 if (pass)
7104 pass = 2;
7105 }
7106 #endif
7107 }
7108 }
7109
7110 /* Now that reload_override_in is known valid,
7111 actually override reload_in. */
7112 for (j = 0; j < n_reloads; j++)
7113 if (reload_override_in[j])
7114 rld[j].in = reload_override_in[j];
7115
7116 /* If this reload won't be done because it has been canceled or is
7117 optional and not inherited, clear reload_reg_rtx so other
7118 routines (such as subst_reloads) don't get confused. */
7119 for (j = 0; j < n_reloads; j++)
7120 if (rld[j].reg_rtx != 0
7121 && ((rld[j].optional && ! reload_inherited[j])
7122 || (rld[j].in == 0 && rld[j].out == 0
7123 && ! rld[j].secondary_p)))
7124 {
7125 int regno = true_regnum (rld[j].reg_rtx);
7126
7127 if (spill_reg_order[regno] >= 0)
7128 clear_reload_reg_in_use (regno, rld[j].opnum,
7129 rld[j].when_needed, rld[j].mode);
7130 rld[j].reg_rtx = 0;
7131 reload_spill_index[j] = -1;
7132 }
7133
7134 /* Record which pseudos and which spill regs have output reloads. */
7135 for (j = 0; j < n_reloads; j++)
7136 {
7137 int r = reload_order[j];
7138
7139 i = reload_spill_index[r];
7140
7141 /* I is nonneg if this reload uses a register.
7142 If rld[r].reg_rtx is 0, this is an optional reload
7143 that we opted to ignore. */
7144 if (rld[r].out_reg != 0 && REG_P (rld[r].out_reg)
7145 && rld[r].reg_rtx != 0)
7146 {
7147 int nregno = REGNO (rld[r].out_reg);
7148 int nr = 1;
7149
7150 if (nregno < FIRST_PSEUDO_REGISTER)
7151 nr = hard_regno_nregs[nregno][rld[r].mode];
7152
7153 while (--nr >= 0)
7154 SET_REGNO_REG_SET (&reg_has_output_reload,
7155 nregno + nr);
7156
7157 if (i >= 0)
7158 add_to_hard_reg_set (&reg_is_output_reload, rld[r].mode, i);
7159
7160 gcc_assert (rld[r].when_needed == RELOAD_OTHER
7161 || rld[r].when_needed == RELOAD_FOR_OUTPUT
7162 || rld[r].when_needed == RELOAD_FOR_INSN);
7163 }
7164 }
7165 }
7166
7167 /* Deallocate the reload register for reload R. This is called from
7168 remove_address_replacements. */
7169
7170 void
7171 deallocate_reload_reg (int r)
7172 {
7173 int regno;
7174
7175 if (! rld[r].reg_rtx)
7176 return;
7177 regno = true_regnum (rld[r].reg_rtx);
7178 rld[r].reg_rtx = 0;
7179 if (spill_reg_order[regno] >= 0)
7180 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
7181 rld[r].mode);
7182 reload_spill_index[r] = -1;
7183 }
7184 \f
/* These arrays are filled by emit_reload_insns and its subroutines.
   Each holds a pending insn sequence for one category of reload;
   emit_input_reload_insns selects among them by reload type (see the
   RELOAD_FOR_* switch there) and, for the per-operand arrays, by the
   reload's operand number.  */

/* RELOAD_FOR_INPUT reloads, indexed by operand number.  */
static rtx_insn *input_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OTHER_ADDRESS reloads.  */
static rtx_insn *other_input_address_reload_insns = 0;
/* RELOAD_OTHER input reloads.  */
static rtx_insn *other_input_reload_insns = 0;
/* RELOAD_FOR_INPUT_ADDRESS reloads, indexed by operand number.  */
static rtx_insn *input_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_INPADDR_ADDRESS reloads, indexed by operand number.  */
static rtx_insn *inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* Output reloads, indexed by operand number (presumably
   RELOAD_FOR_OUTPUT; the output-side emitter is not in view here).  */
static rtx_insn *output_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTPUT_ADDRESS reloads, indexed by operand number.  */
static rtx_insn *output_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OUTADDR_ADDRESS reloads, indexed by operand number.  */
static rtx_insn *outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
/* RELOAD_FOR_OPERAND_ADDRESS reloads.  */
static rtx_insn *operand_reload_insns = 0;
/* RELOAD_FOR_OPADDR_ADDR reloads.  */
static rtx_insn *other_operand_reload_insns = 0;
/* Remaining per-operand output reloads (filled by the output-side
   emitter, not visible in this part of the file).  */
static rtx_insn *other_output_reload_insns[MAX_RECOG_OPERANDS];

/* Values to be put in spill_reg_store are put here first.  Instructions
   must only be placed here if the associated reload register reaches
   the end of the instruction's reload sequence.  */
static rtx_insn *new_spill_reg_store[FIRST_PSEUDO_REGISTER];
/* Hard registers whose reloaded contents died in the current insn.  */
static HARD_REG_SET reg_reloaded_died;
7203
7204 /* Check if *RELOAD_REG is suitable as an intermediate or scratch register
7205 of class NEW_CLASS with mode NEW_MODE. Or alternatively, if alt_reload_reg
7206 is nonzero, if that is suitable. On success, change *RELOAD_REG to the
7207 adjusted register, and return true. Otherwise, return false. */
7208 static bool
7209 reload_adjust_reg_for_temp (rtx *reload_reg, rtx alt_reload_reg,
7210 enum reg_class new_class,
7211 machine_mode new_mode)
7212
7213 {
7214 rtx reg;
7215
7216 for (reg = *reload_reg; reg; reg = alt_reload_reg, alt_reload_reg = 0)
7217 {
7218 unsigned regno = REGNO (reg);
7219
7220 if (!TEST_HARD_REG_BIT (reg_class_contents[(int) new_class], regno))
7221 continue;
7222 if (GET_MODE (reg) != new_mode)
7223 {
7224 if (!HARD_REGNO_MODE_OK (regno, new_mode))
7225 continue;
7226 if (hard_regno_nregs[regno][new_mode]
7227 > hard_regno_nregs[regno][GET_MODE (reg)])
7228 continue;
7229 reg = reload_adjust_reg_for_mode (reg, new_mode);
7230 }
7231 *reload_reg = reg;
7232 return true;
7233 }
7234 return false;
7235 }
7236
7237 /* Check if *RELOAD_REG is suitable as a scratch register for the reload
7238 pattern with insn_code ICODE, or alternatively, if alt_reload_reg is
7239 nonzero, if that is suitable. On success, change *RELOAD_REG to the
7240 adjusted register, and return true. Otherwise, return false. */
7241 static bool
7242 reload_adjust_reg_for_icode (rtx *reload_reg, rtx alt_reload_reg,
7243 enum insn_code icode)
7244
7245 {
7246 enum reg_class new_class = scratch_reload_class (icode);
7247 machine_mode new_mode = insn_data[(int) icode].operand[2].mode;
7248
7249 return reload_adjust_reg_for_temp (reload_reg, alt_reload_reg,
7250 new_class, new_mode);
7251 }
7252
7253 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
7254 has the number J. OLD contains the value to be used as input. */
7255
7256 static void
7257 emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
7258 rtx old, int j)
7259 {
7260 rtx_insn *insn = chain->insn;
7261 rtx reloadreg;
7262 rtx oldequiv_reg = 0;
7263 rtx oldequiv = 0;
7264 int special = 0;
7265 machine_mode mode;
7266 rtx_insn **where;
7267
7268 /* delete_output_reload is only invoked properly if old contains
7269 the original pseudo register. Since this is replaced with a
7270 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
7271 find the pseudo in RELOAD_IN_REG. This is also used to
7272 determine whether a secondary reload is needed. */
7273 if (reload_override_in[j]
7274 && (REG_P (rl->in_reg)
7275 || (GET_CODE (rl->in_reg) == SUBREG
7276 && REG_P (SUBREG_REG (rl->in_reg)))))
7277 {
7278 oldequiv = old;
7279 old = rl->in_reg;
7280 }
7281 if (oldequiv == 0)
7282 oldequiv = old;
7283 else if (REG_P (oldequiv))
7284 oldequiv_reg = oldequiv;
7285 else if (GET_CODE (oldequiv) == SUBREG)
7286 oldequiv_reg = SUBREG_REG (oldequiv);
7287
7288 reloadreg = reload_reg_rtx_for_input[j];
7289 mode = GET_MODE (reloadreg);
7290
7291 /* If we are reloading from a register that was recently stored in
7292 with an output-reload, see if we can prove there was
7293 actually no need to store the old value in it. */
7294
7295 if (optimize && REG_P (oldequiv)
7296 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
7297 && spill_reg_store[REGNO (oldequiv)]
7298 && REG_P (old)
7299 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
7300 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
7301 rl->out_reg)))
7302 delete_output_reload (insn, j, REGNO (oldequiv), reloadreg);
7303
7304 /* Encapsulate OLDEQUIV into the reload mode, then load RELOADREG from
7305 OLDEQUIV. */
7306
7307 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
7308 oldequiv = SUBREG_REG (oldequiv);
7309 if (GET_MODE (oldequiv) != VOIDmode
7310 && mode != GET_MODE (oldequiv))
7311 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
7312
7313 /* Switch to the right place to emit the reload insns. */
7314 switch (rl->when_needed)
7315 {
7316 case RELOAD_OTHER:
7317 where = &other_input_reload_insns;
7318 break;
7319 case RELOAD_FOR_INPUT:
7320 where = &input_reload_insns[rl->opnum];
7321 break;
7322 case RELOAD_FOR_INPUT_ADDRESS:
7323 where = &input_address_reload_insns[rl->opnum];
7324 break;
7325 case RELOAD_FOR_INPADDR_ADDRESS:
7326 where = &inpaddr_address_reload_insns[rl->opnum];
7327 break;
7328 case RELOAD_FOR_OUTPUT_ADDRESS:
7329 where = &output_address_reload_insns[rl->opnum];
7330 break;
7331 case RELOAD_FOR_OUTADDR_ADDRESS:
7332 where = &outaddr_address_reload_insns[rl->opnum];
7333 break;
7334 case RELOAD_FOR_OPERAND_ADDRESS:
7335 where = &operand_reload_insns;
7336 break;
7337 case RELOAD_FOR_OPADDR_ADDR:
7338 where = &other_operand_reload_insns;
7339 break;
7340 case RELOAD_FOR_OTHER_ADDRESS:
7341 where = &other_input_address_reload_insns;
7342 break;
7343 default:
7344 gcc_unreachable ();
7345 }
7346
7347 push_to_sequence (*where);
7348
7349 /* Auto-increment addresses must be reloaded in a special way. */
7350 if (rl->out && ! rl->out_reg)
7351 {
7352 /* We are not going to bother supporting the case where a
7353 incremented register can't be copied directly from
7354 OLDEQUIV since this seems highly unlikely. */
7355 gcc_assert (rl->secondary_in_reload < 0);
7356
7357 if (reload_inherited[j])
7358 oldequiv = reloadreg;
7359
7360 old = XEXP (rl->in_reg, 0);
7361
7362 /* Prevent normal processing of this reload. */
7363 special = 1;
7364 /* Output a special code sequence for this case. */
7365 inc_for_reload (reloadreg, oldequiv, rl->out, rl->inc);
7366 }
7367
7368 /* If we are reloading a pseudo-register that was set by the previous
7369 insn, see if we can get rid of that pseudo-register entirely
7370 by redirecting the previous insn into our reload register. */
7371
7372 else if (optimize && REG_P (old)
7373 && REGNO (old) >= FIRST_PSEUDO_REGISTER
7374 && dead_or_set_p (insn, old)
7375 /* This is unsafe if some other reload
7376 uses the same reg first. */
7377 && ! conflicts_with_override (reloadreg)
7378 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
7379 rl->when_needed, old, rl->out, j, 0))
7380 {
7381 rtx_insn *temp = PREV_INSN (insn);
7382 while (temp && (NOTE_P (temp) || DEBUG_INSN_P (temp)))
7383 temp = PREV_INSN (temp);
7384 if (temp
7385 && NONJUMP_INSN_P (temp)
7386 && GET_CODE (PATTERN (temp)) == SET
7387 && SET_DEST (PATTERN (temp)) == old
7388 /* Make sure we can access insn_operand_constraint. */
7389 && asm_noperands (PATTERN (temp)) < 0
7390 /* This is unsafe if operand occurs more than once in current
7391 insn. Perhaps some occurrences aren't reloaded. */
7392 && count_occurrences (PATTERN (insn), old, 0) == 1)
7393 {
7394 rtx old = SET_DEST (PATTERN (temp));
7395 /* Store into the reload register instead of the pseudo. */
7396 SET_DEST (PATTERN (temp)) = reloadreg;
7397
7398 /* Verify that resulting insn is valid.
7399
7400 Note that we have replaced the destination of TEMP with
7401 RELOADREG. If TEMP references RELOADREG within an
7402 autoincrement addressing mode, then the resulting insn
7403 is ill-formed and we must reject this optimization. */
7404 extract_insn (temp);
7405 if (constrain_operands (1, get_enabled_alternatives (temp))
7406 #ifdef AUTO_INC_DEC
7407 && ! find_reg_note (temp, REG_INC, reloadreg)
7408 #endif
7409 )
7410 {
7411 /* If the previous insn is an output reload, the source is
7412 a reload register, and its spill_reg_store entry will
7413 contain the previous destination. This is now
7414 invalid. */
7415 if (REG_P (SET_SRC (PATTERN (temp)))
7416 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
7417 {
7418 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7419 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
7420 }
7421
7422 /* If these are the only uses of the pseudo reg,
7423 pretend for GDB it lives in the reload reg we used. */
7424 if (REG_N_DEATHS (REGNO (old)) == 1
7425 && REG_N_SETS (REGNO (old)) == 1)
7426 {
7427 reg_renumber[REGNO (old)] = REGNO (reloadreg);
7428 if (ira_conflicts_p)
7429 /* Inform IRA about the change. */
7430 ira_mark_allocation_change (REGNO (old));
7431 alter_reg (REGNO (old), -1, false);
7432 }
7433 special = 1;
7434
7435 /* Adjust any debug insns between temp and insn. */
7436 while ((temp = NEXT_INSN (temp)) != insn)
7437 if (DEBUG_INSN_P (temp))
7438 replace_rtx (PATTERN (temp), old, reloadreg);
7439 else
7440 gcc_assert (NOTE_P (temp));
7441 }
7442 else
7443 {
7444 SET_DEST (PATTERN (temp)) = old;
7445 }
7446 }
7447 }
7448
7449 /* We can't do that, so output an insn to load RELOADREG. */
7450
7451 /* If we have a secondary reload, pick up the secondary register
7452 and icode, if any. If OLDEQUIV and OLD are different or
7453 if this is an in-out reload, recompute whether or not we
7454 still need a secondary register and what the icode should
7455 be. If we still need a secondary register and the class or
7456 icode is different, go back to reloading from OLD if using
7457 OLDEQUIV means that we got the wrong type of register. We
7458 cannot have different class or icode due to an in-out reload
7459 because we don't make such reloads when both the input and
7460 output need secondary reload registers. */
7461
7462 if (! special && rl->secondary_in_reload >= 0)
7463 {
7464 rtx second_reload_reg = 0;
7465 rtx third_reload_reg = 0;
7466 int secondary_reload = rl->secondary_in_reload;
7467 rtx real_oldequiv = oldequiv;
7468 rtx real_old = old;
7469 rtx tmp;
7470 enum insn_code icode;
7471 enum insn_code tertiary_icode = CODE_FOR_nothing;
7472
7473 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
7474 and similarly for OLD.
7475 See comments in get_secondary_reload in reload.c. */
7476 /* If it is a pseudo that cannot be replaced with its
7477 equivalent MEM, we must fall back to reload_in, which
7478 will have all the necessary substitutions registered.
7479 Likewise for a pseudo that can't be replaced with its
7480 equivalent constant.
7481
7482 Take extra care for subregs of such pseudos. Note that
7483 we cannot use reg_equiv_mem in this case because it is
7484 not in the right mode. */
7485
7486 tmp = oldequiv;
7487 if (GET_CODE (tmp) == SUBREG)
7488 tmp = SUBREG_REG (tmp);
7489 if (REG_P (tmp)
7490 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7491 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7492 || reg_equiv_constant (REGNO (tmp)) != 0))
7493 {
7494 if (! reg_equiv_mem (REGNO (tmp))
7495 || num_not_at_initial_offset
7496 || GET_CODE (oldequiv) == SUBREG)
7497 real_oldequiv = rl->in;
7498 else
7499 real_oldequiv = reg_equiv_mem (REGNO (tmp));
7500 }
7501
7502 tmp = old;
7503 if (GET_CODE (tmp) == SUBREG)
7504 tmp = SUBREG_REG (tmp);
7505 if (REG_P (tmp)
7506 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
7507 && (reg_equiv_memory_loc (REGNO (tmp)) != 0
7508 || reg_equiv_constant (REGNO (tmp)) != 0))
7509 {
7510 if (! reg_equiv_mem (REGNO (tmp))
7511 || num_not_at_initial_offset
7512 || GET_CODE (old) == SUBREG)
7513 real_old = rl->in;
7514 else
7515 real_old = reg_equiv_mem (REGNO (tmp));
7516 }
7517
7518 second_reload_reg = rld[secondary_reload].reg_rtx;
7519 if (rld[secondary_reload].secondary_in_reload >= 0)
7520 {
7521 int tertiary_reload = rld[secondary_reload].secondary_in_reload;
7522
7523 third_reload_reg = rld[tertiary_reload].reg_rtx;
7524 tertiary_icode = rld[secondary_reload].secondary_in_icode;
7525 /* We'd have to add more code for quartary reloads. */
7526 gcc_assert (rld[tertiary_reload].secondary_in_reload < 0);
7527 }
7528 icode = rl->secondary_in_icode;
7529
7530 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
7531 || (rl->in != 0 && rl->out != 0))
7532 {
7533 secondary_reload_info sri, sri2;
7534 enum reg_class new_class, new_t_class;
7535
7536 sri.icode = CODE_FOR_nothing;
7537 sri.prev_sri = NULL;
7538 new_class
7539 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7540 rl->rclass, mode,
7541 &sri);
7542
7543 if (new_class == NO_REGS && sri.icode == CODE_FOR_nothing)
7544 second_reload_reg = 0;
7545 else if (new_class == NO_REGS)
7546 {
7547 if (reload_adjust_reg_for_icode (&second_reload_reg,
7548 third_reload_reg,
7549 (enum insn_code) sri.icode))
7550 {
7551 icode = (enum insn_code) sri.icode;
7552 third_reload_reg = 0;
7553 }
7554 else
7555 {
7556 oldequiv = old;
7557 real_oldequiv = real_old;
7558 }
7559 }
7560 else if (sri.icode != CODE_FOR_nothing)
7561 /* We currently lack a way to express this in reloads. */
7562 gcc_unreachable ();
7563 else
7564 {
7565 sri2.icode = CODE_FOR_nothing;
7566 sri2.prev_sri = &sri;
7567 new_t_class
7568 = (enum reg_class) targetm.secondary_reload (1, real_oldequiv,
7569 new_class, mode,
7570 &sri);
7571 if (new_t_class == NO_REGS && sri2.icode == CODE_FOR_nothing)
7572 {
7573 if (reload_adjust_reg_for_temp (&second_reload_reg,
7574 third_reload_reg,
7575 new_class, mode))
7576 {
7577 third_reload_reg = 0;
7578 tertiary_icode = (enum insn_code) sri2.icode;
7579 }
7580 else
7581 {
7582 oldequiv = old;
7583 real_oldequiv = real_old;
7584 }
7585 }
7586 else if (new_t_class == NO_REGS && sri2.icode != CODE_FOR_nothing)
7587 {
7588 rtx intermediate = second_reload_reg;
7589
7590 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7591 new_class, mode)
7592 && reload_adjust_reg_for_icode (&third_reload_reg, NULL,
7593 ((enum insn_code)
7594 sri2.icode)))
7595 {
7596 second_reload_reg = intermediate;
7597 tertiary_icode = (enum insn_code) sri2.icode;
7598 }
7599 else
7600 {
7601 oldequiv = old;
7602 real_oldequiv = real_old;
7603 }
7604 }
7605 else if (new_t_class != NO_REGS && sri2.icode == CODE_FOR_nothing)
7606 {
7607 rtx intermediate = second_reload_reg;
7608
7609 if (reload_adjust_reg_for_temp (&intermediate, NULL,
7610 new_class, mode)
7611 && reload_adjust_reg_for_temp (&third_reload_reg, NULL,
7612 new_t_class, mode))
7613 {
7614 second_reload_reg = intermediate;
7615 tertiary_icode = (enum insn_code) sri2.icode;
7616 }
7617 else
7618 {
7619 oldequiv = old;
7620 real_oldequiv = real_old;
7621 }
7622 }
7623 else
7624 {
7625 /* This could be handled more intelligently too. */
7626 oldequiv = old;
7627 real_oldequiv = real_old;
7628 }
7629 }
7630 }
7631
7632 /* If we still need a secondary reload register, check
7633 to see if it is being used as a scratch or intermediate
7634 register and generate code appropriately. If we need
7635 a scratch register, use REAL_OLDEQUIV since the form of
7636 the insn may depend on the actual address if it is
7637 a MEM. */
7638
7639 if (second_reload_reg)
7640 {
7641 if (icode != CODE_FOR_nothing)
7642 {
7643 /* We'd have to add extra code to handle this case. */
7644 gcc_assert (!third_reload_reg);
7645
7646 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
7647 second_reload_reg));
7648 special = 1;
7649 }
7650 else
7651 {
7652 /* See if we need a scratch register to load the
7653 intermediate register (a tertiary reload). */
7654 if (tertiary_icode != CODE_FOR_nothing)
7655 {
7656 emit_insn ((GEN_FCN (tertiary_icode)
7657 (second_reload_reg, real_oldequiv,
7658 third_reload_reg)));
7659 }
7660 else if (third_reload_reg)
7661 {
7662 gen_reload (third_reload_reg, real_oldequiv,
7663 rl->opnum,
7664 rl->when_needed);
7665 gen_reload (second_reload_reg, third_reload_reg,
7666 rl->opnum,
7667 rl->when_needed);
7668 }
7669 else
7670 gen_reload (second_reload_reg, real_oldequiv,
7671 rl->opnum,
7672 rl->when_needed);
7673
7674 oldequiv = second_reload_reg;
7675 }
7676 }
7677 }
7678
7679 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
7680 {
7681 rtx real_oldequiv = oldequiv;
7682
7683 if ((REG_P (oldequiv)
7684 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
7685 && (reg_equiv_memory_loc (REGNO (oldequiv)) != 0
7686 || reg_equiv_constant (REGNO (oldequiv)) != 0))
7687 || (GET_CODE (oldequiv) == SUBREG
7688 && REG_P (SUBREG_REG (oldequiv))
7689 && (REGNO (SUBREG_REG (oldequiv))
7690 >= FIRST_PSEUDO_REGISTER)
7691 && ((reg_equiv_memory_loc (REGNO (SUBREG_REG (oldequiv))) != 0)
7692 || (reg_equiv_constant (REGNO (SUBREG_REG (oldequiv))) != 0)))
7693 || (CONSTANT_P (oldequiv)
7694 && (targetm.preferred_reload_class (oldequiv,
7695 REGNO_REG_CLASS (REGNO (reloadreg)))
7696 == NO_REGS)))
7697 real_oldequiv = rl->in;
7698 gen_reload (reloadreg, real_oldequiv, rl->opnum,
7699 rl->when_needed);
7700 }
7701
7702 if (cfun->can_throw_non_call_exceptions)
7703 copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
7704
7705 /* End this sequence. */
7706 *where = get_insns ();
7707 end_sequence ();
7708
7709 /* Update reload_override_in so that delete_address_reloads_1
7710 can see the actual register usage. */
7711 if (oldequiv_reg)
7712 reload_override_in[j] = oldequiv;
7713 }
7714
/* Generate the insns for the output reload RL, which is for the insn described
   by CHAIN and has the number J.  The insns are accumulated in the sequence
   selected by RL->when_needed and spliced after the insn later by
   emit_reload_insns.  */
static void
emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
			  int j)
{
  rtx reloadreg;
  rtx_insn *insn = chain->insn;
  /* Set to 1 once a secondary-reload insn has already performed the store
     to OLD, so the generic final store below must be suppressed.  */
  int special = 0;
  rtx old = rl->out;
  machine_mode mode;
  rtx_insn *p;
  rtx rl_reg_rtx;

  if (rl->when_needed == RELOAD_OTHER)
    start_sequence ();
  else
    push_to_sequence (output_reload_insns[rl->opnum]);

  rl_reg_rtx = reload_reg_rtx_for_output[j];
  mode = GET_MODE (rl_reg_rtx);

  reloadreg = rl_reg_rtx;

  /* If we need two reload regs, set RELOADREG to the intermediate
     one, since it will be stored into OLD.  We might need a secondary
     register only for an input reload, so check again here.  */

  if (rl->secondary_out_reload >= 0)
    {
      rtx real_old = old;
      int secondary_reload = rl->secondary_out_reload;
      int tertiary_reload = rld[secondary_reload].secondary_out_reload;

      /* If OLD is a spilled pseudo with a known memory equivalent, the
	 real destination of the store is that MEM.  */
      if (REG_P (old) && REGNO (old) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem (REGNO (old)) != 0)
	real_old = reg_equiv_mem (REGNO (old));

      if (secondary_reload_class (0, rl->rclass, mode, real_old) != NO_REGS)
	{
	  rtx second_reloadreg = reloadreg;
	  reloadreg = rld[secondary_reload].reg_rtx;

	  /* See if RELOADREG is to be used as a scratch register
	     or as an intermediate register.  */
	  if (rl->secondary_out_icode != CODE_FOR_nothing)
	    {
	      /* We'd have to add extra code to handle this case.  */
	      gcc_assert (tertiary_reload < 0);

	      emit_insn ((GEN_FCN (rl->secondary_out_icode)
			  (real_old, second_reloadreg, reloadreg)));
	      special = 1;
	    }
	  else
	    {
	      /* See if we need both a scratch and intermediate reload
		 register.  */

	      enum insn_code tertiary_icode
		= rld[secondary_reload].secondary_out_icode;

	      /* We'd have to add more code for quartary reloads.  */
	      gcc_assert (tertiary_reload < 0
			  || rld[tertiary_reload].secondary_out_reload < 0);

	      if (GET_MODE (reloadreg) != mode)
		reloadreg = reload_adjust_reg_for_mode (reloadreg, mode);

	      if (tertiary_icode != CODE_FOR_nothing)
		{
		  rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		  /* Copy primary reload reg to secondary reload reg.
		     (Note that these have been swapped above, then
		     secondary reload reg to OLD using our insn.)  */

		  /* If REAL_OLD is a paradoxical SUBREG, remove it
		     and try to put the opposite SUBREG on
		     RELOADREG.  */
		  strip_paradoxical_subreg (&real_old, &reloadreg);

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  emit_insn ((GEN_FCN (tertiary_icode)
			      (real_old, reloadreg, third_reloadreg)));
		  special = 1;
		}

	      else
		{
		  /* Copy between the reload regs here and then to
		     OUT later.  */

		  gen_reload (reloadreg, second_reloadreg,
			      rl->opnum, rl->when_needed);
		  if (tertiary_reload >= 0)
		    {
		      rtx third_reloadreg = rld[tertiary_reload].reg_rtx;

		      gen_reload (third_reloadreg, reloadreg,
				  rl->opnum, rl->when_needed);
		      reloadreg = third_reloadreg;
		    }
		}
	    }
	}
    }

  /* Output the last reload insn.  */
  if (! special)
    {
      rtx set;

      /* Don't output the last reload if OLD is not the dest of
	 INSN and is in the src and is clobbered by INSN.  */
      if (! flag_expensive_optimizations
	  || !REG_P (old)
	  || !(set = single_set (insn))
	  || rtx_equal_p (old, SET_DEST (set))
	  || !reg_mentioned_p (old, SET_SRC (set))
	  || !((REGNO (old) < FIRST_PSEUDO_REGISTER)
	       && regno_clobbered_p (REGNO (old), insn, rl->mode, 0)))
	gen_reload (old, reloadreg, rl->opnum,
		    rl->when_needed);
    }

  /* Look at all insns we emitted, just to be safe.  */
  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);

	/* If this output reload doesn't come from a spill reg,
	   clear any memory of reloaded copies of the pseudo reg.
	   If this output reload comes from a spill reg,
	   reg_has_output_reload will make this do nothing.  */
	note_stores (pat, forget_old_reloads_1, NULL);

	if (reg_mentioned_p (rl_reg_rtx, pat))
	  {
	    rtx set = single_set (insn);
	    if (reload_spill_index[j] < 0
		&& set
		&& SET_SRC (set) == rl_reg_rtx)
	      {
		int src = REGNO (SET_SRC (set));

		reload_spill_index[j] = src;
		SET_HARD_REG_BIT (reg_is_output_reload, src);
		if (find_regno_note (insn, REG_DEAD, src))
		  SET_HARD_REG_BIT (reg_reloaded_died, src);
	      }
	    if (HARD_REGISTER_P (rl_reg_rtx))
	      {
		int s = rl->secondary_out_reload;
		set = single_set (p);
		/* If this reload copies only to the secondary reload
		   register, the secondary reload does the actual
		   store.  */
		if (s >= 0 && set == NULL_RTX)
		  /* We can't tell what function the secondary reload
		     has and where the actual store to the pseudo is
		     made; leave new_spill_reg_store alone.  */
		  ;
		else if (s >= 0
			 && SET_SRC (set) == rl_reg_rtx
			 && SET_DEST (set) == rld[s].reg_rtx)
		  {
		    /* Usually the next instruction will be the
		       secondary reload insn;  if we can confirm
		       that it is, setting new_spill_reg_store to
		       that insn will allow an extra optimization.  */
		    rtx s_reg = rld[s].reg_rtx;
		    rtx_insn *next = NEXT_INSN (p);
		    rld[s].out = rl->out;
		    rld[s].out_reg = rl->out_reg;
		    set = single_set (next);
		    if (set && SET_SRC (set) == s_reg
			&& reload_reg_rtx_reaches_end_p (s_reg, s))
		      {
			SET_HARD_REG_BIT (reg_is_output_reload,
					  REGNO (s_reg));
			new_spill_reg_store[REGNO (s_reg)] = next;
		      }
		  }
		else if (reload_reg_rtx_reaches_end_p (rl_reg_rtx, j))
		  new_spill_reg_store[REGNO (rl_reg_rtx)] = p;
	      }
	  }
      }

  /* Hand the accumulated sequence back to the per-opnum slot it came
     from; RELOAD_OTHER sequences are prepended to what was already
     collected for this operand.  */
  if (rl->when_needed == RELOAD_OTHER)
    {
      emit_insn (other_output_reload_insns[rl->opnum]);
      other_output_reload_insns[rl->opnum] = get_insns ();
    }
  else
    output_reload_insns[rl->opnum] = get_insns ();

  if (cfun->can_throw_non_call_exceptions)
    copy_reg_eh_region_note_forward (insn, get_insns (), NULL);

  end_sequence ();
}
7920
/* Do input reloading for reload RL, which is for the insn described by CHAIN
   and has the number J.  Decides the mode to reload in, records the chosen
   reload register in reload_reg_rtx_for_input[J], emits the input reload
   insns if they are actually needed, and tries to delete a now-redundant
   earlier output reload of the same value.  */
static void
do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx_insn *insn = chain->insn;
  /* If the reload value is a MEM, reload from the underlying register
     (in_reg) rather than the stack slot itself.  */
  rtx old = (rl->in && MEM_P (rl->in)
	     ? rl->in_reg : rl->in);
  rtx reg_rtx = rl->reg_rtx;

  if (old && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 This is very tricky because we have three to choose from.
	 There is the mode the insn operand wants (rl->inmode).
	 There is the mode of the reload register RELOADREG.
	 There is the intrinsic mode of the operand, which we could find
	 by stripping some SUBREGs.
	 It turns out that RELOADREG's mode is irrelevant:
	 we can change that arbitrarily.

	 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
	 then the reload reg may not support QImode moves, so use SImode.
	 If foo is in memory due to spilling a pseudo reg, this is safe,
	 because the QImode value is in the least significant part of a
	 slot big enough for a SImode.  If foo is some other sort of
	 memory reference, then it is impossible to reload this case,
	 so previous passes had better make sure this never happens.

	 Then consider a one-word union which has SImode and one of its
	 members is a float, being fetched as (SUBREG:SF union:SI).
	 We must fetch that as SFmode because we could be loading into
	 a float-only register.  In this case OLD's mode is correct.

	 Consider an immediate integer: it has VOIDmode.  Here we need
	 to get a mode from something else.

	 In some cases, there is a fourth mode, the operand's
	 containing mode.  If the insn specifies a containing mode for
	 this operand, it overrides all others.

	 I am not sure whether the algorithm here is always right,
	 but it does the right things in those cases.  */

      mode = GET_MODE (old);
      if (mode == VOIDmode)
	mode = rl->inmode;

      /* We cannot use gen_lowpart_common since it can do the wrong thing
	 when REG_RTX has a multi-word mode.  Note that REG_RTX must
	 always be a REG here.  */
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_input[j] = reg_rtx;

  if (old != 0
      /* AUTO_INC reloads need to be handled even if inherited.  We got an
	 AUTO_INC reload if reload_out is set but reload_out_reg isn't.  */
      && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
      && ! rtx_equal_p (reg_rtx, old)
      && reg_rtx != 0)
    emit_input_reload_insns (chain, rld + j, old, j);

  /* When inheriting a wider reload, we have a MEM in rl->in,
     e.g. inheriting a SImode output reload for
     (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10)))  */
  if (optimize && reload_inherited[j] && rl->in
      && MEM_P (rl->in)
      && MEM_P (rl->in_reg)
      && reload_spill_index[j] >= 0
      && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
    rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];

  /* If we are reloading a register that was recently stored in with an
     output-reload, see if we can prove there was
     actually no need to store the old value in it.  */

  if (optimize
      && (reload_inherited[j] || reload_override_in[j])
      && reg_rtx
      && REG_P (reg_rtx)
      && spill_reg_store[REGNO (reg_rtx)] != 0
#if 0
      /* There doesn't seem to be any reason to restrict this to pseudos
	 and doing so loses in the case where we are copying from a
	 register of the wrong class.  */
      && !HARD_REGISTER_P (spill_reg_stored_to[REGNO (reg_rtx)])
#endif
      /* The insn might have already some references to stackslots
	 replaced by MEMs, while reload_out_reg still names the
	 original pseudo.  */
      && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (reg_rtx)])
	  || rtx_equal_p (spill_reg_stored_to[REGNO (reg_rtx)], rl->out_reg)))
    delete_output_reload (insn, j, REGNO (reg_rtx), reg_rtx);
}
8019
/* Do output reloading for reload RL, which is for the insn described by
   CHAIN and has the number J.  Determines the mode to reload in, records
   the reload register in reload_reg_rtx_for_output[J], and either avoids
   the store entirely (dead output, SCRATCH) or emits the output reload
   insns via emit_output_reload_insns.
   ??? At some point we need to support handling output reloads of
   JUMP_INSNs or insns that set cc0.  */
static void
do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
{
  rtx note, old;
  rtx_insn *insn = chain->insn;
  /* If this is an output reload that stores something that is
     not loaded in this same reload, see if we can eliminate a previous
     store.  */
  rtx pseudo = rl->out_reg;
  rtx reg_rtx = rl->reg_rtx;

  if (rl->out && reg_rtx)
    {
      machine_mode mode;

      /* Determine the mode to reload in.
	 See comments above (for input reloading).  */
      mode = GET_MODE (rl->out);
      if (mode == VOIDmode)
	{
	  /* VOIDmode should never happen for an output.  */
	  if (asm_noperands (PATTERN (insn)) < 0)
	    /* It's the compiler's fault.  */
	    fatal_insn ("VOIDmode on an output", insn);
	  error_for_asm (insn, "output operand is constant in %<asm%>");
	  /* Prevent crash--use something we know is valid.  */
	  mode = word_mode;
	  rl->out = gen_rtx_REG (mode, REGNO (reg_rtx));
	}
      if (GET_MODE (reg_rtx) != mode)
	reg_rtx = reload_adjust_reg_for_mode (reg_rtx, mode);
    }
  reload_reg_rtx_for_output[j] = reg_rtx;

  /* If PSEUDO was previously stored from a spill register and that store
     still matches, the old store is dead: delete it.  */
  if (pseudo
      && optimize
      && REG_P (pseudo)
      && ! rtx_equal_p (rl->in_reg, pseudo)
      && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
      && reg_last_reload_reg[REGNO (pseudo)])
    {
      int pseudo_no = REGNO (pseudo);
      int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);

      /* We don't need to test full validity of last_regno for
	 inherit here; we only want to know if the store actually
	 matches the pseudo.  */
      if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
	  && reg_reloaded_contents[last_regno] == pseudo_no
	  && spill_reg_store[last_regno]
	  && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
	delete_output_reload (insn, j, last_regno, reg_rtx);
    }

  old = rl->out_reg;
  if (old == 0
      || reg_rtx == 0
      || rtx_equal_p (old, reg_rtx))
    return;

  /* An output operand that dies right away does need a reload,
     but need not be copied from it.  Show the new location in the
     REG_UNUSED note.  */
  if ((REG_P (old) || GET_CODE (old) == SCRATCH)
      && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
    {
      XEXP (note, 0) = reg_rtx;
      return;
    }
  /* Likewise for a SUBREG of an operand that dies.  */
  else if (GET_CODE (old) == SUBREG
	   && REG_P (SUBREG_REG (old))
	   && 0 != (note = find_reg_note (insn, REG_UNUSED,
					  SUBREG_REG (old))))
    {
      XEXP (note, 0) = gen_lowpart_common (GET_MODE (old), reg_rtx);
      return;
    }
  else if (GET_CODE (old) == SCRATCH)
    /* If we aren't optimizing, there won't be a REG_UNUSED note,
       but we don't want to make an output reload.  */
    return;

  /* If is a JUMP_INSN, we can't support output reloads yet.  */
  gcc_assert (NONJUMP_INSN_P (insn));

  emit_output_reload_insns (chain, rld + j, j);
}
8112
8113 /* A reload copies values of MODE from register SRC to register DEST.
8114 Return true if it can be treated for inheritance purposes like a
8115 group of reloads, each one reloading a single hard register. The
8116 caller has already checked that (reg:MODE SRC) and (reg:MODE DEST)
8117 occupy the same number of hard registers. */
8118
8119 static bool
8120 inherit_piecemeal_p (int dest ATTRIBUTE_UNUSED,
8121 int src ATTRIBUTE_UNUSED,
8122 machine_mode mode ATTRIBUTE_UNUSED)
8123 {
8124 #ifdef CANNOT_CHANGE_MODE_CLASS
8125 return (!REG_CANNOT_CHANGE_MODE_P (dest, mode, reg_raw_mode[dest])
8126 && !REG_CANNOT_CHANGE_MODE_P (src, mode, reg_raw_mode[src]));
8127 #else
8128 return true;
8129 #endif
8130 }
8131
8132 /* Output insns to reload values in and out of the chosen reload regs. */
8133
8134 static void
8135 emit_reload_insns (struct insn_chain *chain)
8136 {
8137 rtx_insn *insn = chain->insn;
8138
8139 int j;
8140
8141 CLEAR_HARD_REG_SET (reg_reloaded_died);
8142
8143 for (j = 0; j < reload_n_operands; j++)
8144 input_reload_insns[j] = input_address_reload_insns[j]
8145 = inpaddr_address_reload_insns[j]
8146 = output_reload_insns[j] = output_address_reload_insns[j]
8147 = outaddr_address_reload_insns[j]
8148 = other_output_reload_insns[j] = 0;
8149 other_input_address_reload_insns = 0;
8150 other_input_reload_insns = 0;
8151 operand_reload_insns = 0;
8152 other_operand_reload_insns = 0;
8153
8154 /* Dump reloads into the dump file. */
8155 if (dump_file)
8156 {
8157 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
8158 debug_reload_to_stream (dump_file);
8159 }
8160
8161 for (j = 0; j < n_reloads; j++)
8162 if (rld[j].reg_rtx && HARD_REGISTER_P (rld[j].reg_rtx))
8163 {
8164 unsigned int i;
8165
8166 for (i = REGNO (rld[j].reg_rtx); i < END_REGNO (rld[j].reg_rtx); i++)
8167 new_spill_reg_store[i] = 0;
8168 }
8169
8170 /* Now output the instructions to copy the data into and out of the
8171 reload registers. Do these in the order that the reloads were reported,
8172 since reloads of base and index registers precede reloads of operands
8173 and the operands may need the base and index registers reloaded. */
8174
8175 for (j = 0; j < n_reloads; j++)
8176 {
8177 do_input_reload (chain, rld + j, j);
8178 do_output_reload (chain, rld + j, j);
8179 }
8180
8181 /* Now write all the insns we made for reloads in the order expected by
8182 the allocation functions. Prior to the insn being reloaded, we write
8183 the following reloads:
8184
8185 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
8186
8187 RELOAD_OTHER reloads.
8188
8189 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
8190 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
8191 RELOAD_FOR_INPUT reload for the operand.
8192
8193 RELOAD_FOR_OPADDR_ADDRS reloads.
8194
8195 RELOAD_FOR_OPERAND_ADDRESS reloads.
8196
8197 After the insn being reloaded, we write the following:
8198
8199 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
8200 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
8201 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
8202 reloads for the operand. The RELOAD_OTHER output reloads are
8203 output in descending order by reload number. */
8204
8205 emit_insn_before (other_input_address_reload_insns, insn);
8206 emit_insn_before (other_input_reload_insns, insn);
8207
8208 for (j = 0; j < reload_n_operands; j++)
8209 {
8210 emit_insn_before (inpaddr_address_reload_insns[j], insn);
8211 emit_insn_before (input_address_reload_insns[j], insn);
8212 emit_insn_before (input_reload_insns[j], insn);
8213 }
8214
8215 emit_insn_before (other_operand_reload_insns, insn);
8216 emit_insn_before (operand_reload_insns, insn);
8217
8218 for (j = 0; j < reload_n_operands; j++)
8219 {
8220 rtx x = emit_insn_after (outaddr_address_reload_insns[j], insn);
8221 x = emit_insn_after (output_address_reload_insns[j], x);
8222 x = emit_insn_after (output_reload_insns[j], x);
8223 emit_insn_after (other_output_reload_insns[j], x);
8224 }
8225
8226 /* For all the spill regs newly reloaded in this instruction,
8227 record what they were reloaded from, so subsequent instructions
8228 can inherit the reloads.
8229
8230 Update spill_reg_store for the reloads of this insn.
8231 Copy the elements that were updated in the loop above. */
8232
8233 for (j = 0; j < n_reloads; j++)
8234 {
8235 int r = reload_order[j];
8236 int i = reload_spill_index[r];
8237
8238 /* If this is a non-inherited input reload from a pseudo, we must
8239 clear any memory of a previous store to the same pseudo. Only do
8240 something if there will not be an output reload for the pseudo
8241 being reloaded. */
8242 if (rld[r].in_reg != 0
8243 && ! (reload_inherited[r] || reload_override_in[r]))
8244 {
8245 rtx reg = rld[r].in_reg;
8246
8247 if (GET_CODE (reg) == SUBREG)
8248 reg = SUBREG_REG (reg);
8249
8250 if (REG_P (reg)
8251 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
8252 && !REGNO_REG_SET_P (&reg_has_output_reload, REGNO (reg)))
8253 {
8254 int nregno = REGNO (reg);
8255
8256 if (reg_last_reload_reg[nregno])
8257 {
8258 int last_regno = REGNO (reg_last_reload_reg[nregno]);
8259
8260 if (reg_reloaded_contents[last_regno] == nregno)
8261 spill_reg_store[last_regno] = 0;
8262 }
8263 }
8264 }
8265
8266 /* I is nonneg if this reload used a register.
8267 If rld[r].reg_rtx is 0, this is an optional reload
8268 that we opted to ignore. */
8269
8270 if (i >= 0 && rld[r].reg_rtx != 0)
8271 {
8272 int nr = hard_regno_nregs[i][GET_MODE (rld[r].reg_rtx)];
8273 int k;
8274
8275 /* For a multi register reload, we need to check if all or part
8276 of the value lives to the end. */
8277 for (k = 0; k < nr; k++)
8278 if (reload_reg_reaches_end_p (i + k, r))
8279 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
8280
8281 /* Maybe the spill reg contains a copy of reload_out. */
8282 if (rld[r].out != 0
8283 && (REG_P (rld[r].out)
8284 || (rld[r].out_reg
8285 ? REG_P (rld[r].out_reg)
8286 /* The reload value is an auto-modification of
8287 some kind. For PRE_INC, POST_INC, PRE_DEC
8288 and POST_DEC, we record an equivalence
8289 between the reload register and the operand
8290 on the optimistic assumption that we can make
8291 the equivalence hold. reload_as_needed must
8292 then either make it hold or invalidate the
8293 equivalence.
8294
8295 PRE_MODIFY and POST_MODIFY addresses are reloaded
8296 somewhat differently, and allowing them here leads
8297 to problems. */
8298 : (GET_CODE (rld[r].out) != POST_MODIFY
8299 && GET_CODE (rld[r].out) != PRE_MODIFY))))
8300 {
8301 rtx reg;
8302
8303 reg = reload_reg_rtx_for_output[r];
8304 if (reload_reg_rtx_reaches_end_p (reg, r))
8305 {
8306 machine_mode mode = GET_MODE (reg);
8307 int regno = REGNO (reg);
8308 int nregs = hard_regno_nregs[regno][mode];
8309 rtx out = (REG_P (rld[r].out)
8310 ? rld[r].out
8311 : rld[r].out_reg
8312 ? rld[r].out_reg
8313 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
8314 int out_regno = REGNO (out);
8315 int out_nregs = (!HARD_REGISTER_NUM_P (out_regno) ? 1
8316 : hard_regno_nregs[out_regno][mode]);
8317 bool piecemeal;
8318
8319 spill_reg_store[regno] = new_spill_reg_store[regno];
8320 spill_reg_stored_to[regno] = out;
8321 reg_last_reload_reg[out_regno] = reg;
8322
8323 piecemeal = (HARD_REGISTER_NUM_P (out_regno)
8324 && nregs == out_nregs
8325 && inherit_piecemeal_p (out_regno, regno, mode));
8326
8327 /* If OUT_REGNO is a hard register, it may occupy more than
8328 one register. If it does, say what is in the
8329 rest of the registers assuming that both registers
8330 agree on how many words the object takes. If not,
8331 invalidate the subsequent registers. */
8332
8333 if (HARD_REGISTER_NUM_P (out_regno))
8334 for (k = 1; k < out_nregs; k++)
8335 reg_last_reload_reg[out_regno + k]
8336 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8337
8338 /* Now do the inverse operation. */
8339 for (k = 0; k < nregs; k++)
8340 {
8341 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8342 reg_reloaded_contents[regno + k]
8343 = (!HARD_REGISTER_NUM_P (out_regno) || !piecemeal
8344 ? out_regno
8345 : out_regno + k);
8346 reg_reloaded_insn[regno + k] = insn;
8347 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8348 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8349 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8350 regno + k);
8351 else
8352 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8353 regno + k);
8354 }
8355 }
8356 }
8357 /* Maybe the spill reg contains a copy of reload_in. Only do
8358 something if there will not be an output reload for
8359 the register being reloaded. */
8360 else if (rld[r].out_reg == 0
8361 && rld[r].in != 0
8362 && ((REG_P (rld[r].in)
8363 && !HARD_REGISTER_P (rld[r].in)
8364 && !REGNO_REG_SET_P (&reg_has_output_reload,
8365 REGNO (rld[r].in)))
8366 || (REG_P (rld[r].in_reg)
8367 && !REGNO_REG_SET_P (&reg_has_output_reload,
8368 REGNO (rld[r].in_reg))))
8369 && !reg_set_p (reload_reg_rtx_for_input[r], PATTERN (insn)))
8370 {
8371 rtx reg;
8372
8373 reg = reload_reg_rtx_for_input[r];
8374 if (reload_reg_rtx_reaches_end_p (reg, r))
8375 {
8376 machine_mode mode;
8377 int regno;
8378 int nregs;
8379 int in_regno;
8380 int in_nregs;
8381 rtx in;
8382 bool piecemeal;
8383
8384 mode = GET_MODE (reg);
8385 regno = REGNO (reg);
8386 nregs = hard_regno_nregs[regno][mode];
8387 if (REG_P (rld[r].in)
8388 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
8389 in = rld[r].in;
8390 else if (REG_P (rld[r].in_reg))
8391 in = rld[r].in_reg;
8392 else
8393 in = XEXP (rld[r].in_reg, 0);
8394 in_regno = REGNO (in);
8395
8396 in_nregs = (!HARD_REGISTER_NUM_P (in_regno) ? 1
8397 : hard_regno_nregs[in_regno][mode]);
8398
8399 reg_last_reload_reg[in_regno] = reg;
8400
8401 piecemeal = (HARD_REGISTER_NUM_P (in_regno)
8402 && nregs == in_nregs
8403 && inherit_piecemeal_p (regno, in_regno, mode));
8404
8405 if (HARD_REGISTER_NUM_P (in_regno))
8406 for (k = 1; k < in_nregs; k++)
8407 reg_last_reload_reg[in_regno + k]
8408 = (piecemeal ? regno_reg_rtx[regno + k] : 0);
8409
8410 /* Unless we inherited this reload, show we haven't
8411 recently done a store.
8412 Previous stores of inherited auto_inc expressions
8413 also have to be discarded. */
8414 if (! reload_inherited[r]
8415 || (rld[r].out && ! rld[r].out_reg))
8416 spill_reg_store[regno] = 0;
8417
8418 for (k = 0; k < nregs; k++)
8419 {
8420 CLEAR_HARD_REG_BIT (reg_reloaded_dead, regno + k);
8421 reg_reloaded_contents[regno + k]
8422 = (!HARD_REGISTER_NUM_P (in_regno) || !piecemeal
8423 ? in_regno
8424 : in_regno + k);
8425 reg_reloaded_insn[regno + k] = insn;
8426 SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
8427 if (HARD_REGNO_CALL_PART_CLOBBERED (regno + k, mode))
8428 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8429 regno + k);
8430 else
8431 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8432 regno + k);
8433 }
8434 }
8435 }
8436 }
8437
8438 /* The following if-statement was #if 0'd in 1.34 (or before...).
8439 It's reenabled in 1.35 because supposedly nothing else
8440 deals with this problem. */
8441
8442 /* If a register gets output-reloaded from a non-spill register,
8443 that invalidates any previous reloaded copy of it.
8444 But forget_old_reloads_1 won't get to see it, because
8445 it thinks only about the original insn. So invalidate it here.
8446 Also do the same thing for RELOAD_OTHER constraints where the
8447 output is discarded. */
8448 if (i < 0
8449 && ((rld[r].out != 0
8450 && (REG_P (rld[r].out)
8451 || (MEM_P (rld[r].out)
8452 && REG_P (rld[r].out_reg))))
8453 || (rld[r].out == 0 && rld[r].out_reg
8454 && REG_P (rld[r].out_reg))))
8455 {
8456 rtx out = ((rld[r].out && REG_P (rld[r].out))
8457 ? rld[r].out : rld[r].out_reg);
8458 int out_regno = REGNO (out);
8459 machine_mode mode = GET_MODE (out);
8460
8461 /* REG_RTX is now set or clobbered by the main instruction.
8462 As the comment above explains, forget_old_reloads_1 only
8463 sees the original instruction, and there is no guarantee
8464 that the original instruction also clobbered REG_RTX.
8465 For example, if find_reloads sees that the input side of
8466 a matched operand pair dies in this instruction, it may
8467 use the input register as the reload register.
8468
8469 Calling forget_old_reloads_1 is a waste of effort if
8470 REG_RTX is also the output register.
8471
8472 If we know that REG_RTX holds the value of a pseudo
8473 register, the code after the call will record that fact. */
8474 if (rld[r].reg_rtx && rld[r].reg_rtx != out)
8475 forget_old_reloads_1 (rld[r].reg_rtx, NULL_RTX, NULL);
8476
8477 if (!HARD_REGISTER_NUM_P (out_regno))
8478 {
8479 rtx src_reg;
8480 rtx_insn *store_insn = NULL;
8481
8482 reg_last_reload_reg[out_regno] = 0;
8483
8484 /* If we can find a hard register that is stored, record
8485 the storing insn so that we may delete this insn with
8486 delete_output_reload. */
8487 src_reg = reload_reg_rtx_for_output[r];
8488
8489 if (src_reg)
8490 {
8491 if (reload_reg_rtx_reaches_end_p (src_reg, r))
8492 store_insn = new_spill_reg_store[REGNO (src_reg)];
8493 else
8494 src_reg = NULL_RTX;
8495 }
8496 else
8497 {
8498 /* If this is an optional reload, try to find the
8499 source reg from an input reload. */
8500 rtx set = single_set (insn);
8501 if (set && SET_DEST (set) == rld[r].out)
8502 {
8503 int k;
8504
8505 src_reg = SET_SRC (set);
8506 store_insn = insn;
8507 for (k = 0; k < n_reloads; k++)
8508 {
8509 if (rld[k].in == src_reg)
8510 {
8511 src_reg = reload_reg_rtx_for_input[k];
8512 break;
8513 }
8514 }
8515 }
8516 }
8517 if (src_reg && REG_P (src_reg)
8518 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
8519 {
8520 int src_regno, src_nregs, k;
8521 rtx note;
8522
8523 gcc_assert (GET_MODE (src_reg) == mode);
8524 src_regno = REGNO (src_reg);
8525 src_nregs = hard_regno_nregs[src_regno][mode];
8526 /* The place where to find a death note varies with
8527 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
8528 necessarily checked exactly in the code that moves
8529 notes, so just check both locations. */
8530 note = find_regno_note (insn, REG_DEAD, src_regno);
8531 if (! note && store_insn)
8532 note = find_regno_note (store_insn, REG_DEAD, src_regno);
8533 for (k = 0; k < src_nregs; k++)
8534 {
8535 spill_reg_store[src_regno + k] = store_insn;
8536 spill_reg_stored_to[src_regno + k] = out;
8537 reg_reloaded_contents[src_regno + k] = out_regno;
8538 reg_reloaded_insn[src_regno + k] = store_insn;
8539 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
8540 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
8541 if (HARD_REGNO_CALL_PART_CLOBBERED (src_regno + k,
8542 mode))
8543 SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8544 src_regno + k);
8545 else
8546 CLEAR_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
8547 src_regno + k);
8548 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + k);
8549 if (note)
8550 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
8551 else
8552 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
8553 }
8554 reg_last_reload_reg[out_regno] = src_reg;
8555 /* We have to set reg_has_output_reload here, or else
8556 forget_old_reloads_1 will clear reg_last_reload_reg
8557 right away. */
8558 SET_REGNO_REG_SET (&reg_has_output_reload,
8559 out_regno);
8560 }
8561 }
8562 else
8563 {
8564 int k, out_nregs = hard_regno_nregs[out_regno][mode];
8565
8566 for (k = 0; k < out_nregs; k++)
8567 reg_last_reload_reg[out_regno + k] = 0;
8568 }
8569 }
8570 }
8571 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
8572 }
8573 \f
8574 /* Go through the motions to emit INSN and test if it is strictly valid.
8575 Return the emitted insn if valid, else return NULL. */
8576
8577 static rtx_insn *
8578 emit_insn_if_valid_for_reload (rtx pat)
8579 {
8580 rtx_insn *last = get_last_insn ();
8581 int code;
8582
8583 rtx_insn *insn = emit_insn (pat);
8584 code = recog_memoized (insn);
8585
8586 if (code >= 0)
8587 {
8588 extract_insn (insn);
8589 /* We want constrain operands to treat this insn strictly in its
8590 validity determination, i.e., the way it would after reload has
8591 completed. */
8592 if (constrain_operands (1, get_enabled_alternatives (insn)))
8593 return insn;
8594 }
8595
8596 delete_insns_since (last);
8597 return NULL;
8598 }
8599
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

static rtx_insn *
gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
{
  /* Remember where the insn stream ends now, so the first insn emitted
     by this call can be found and returned at the end.  */
  rtx_insn *last = get_last_insn ();
  rtx_insn *tem;
#ifdef SECONDARY_MEMORY_NEEDED
  rtx tem1, tem2;
#endif

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (!strip_paradoxical_subreg (&in, &out))
    strip_paradoxical_subreg (&out, &in);

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Or we can be asked to reload an unary operand that was a fragment of
     an addressing mode, into a register.  If it isn't recognized as-is,
     we try making the unop operand and the reload-register the same:
     (set reg:X (unop:X expr:Y))
     -> (set reg:Y expr:Y) (set reg:X (unop:X reg:Y)).

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (REG_P (XEXP (in, 0))
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || MEM_P (XEXP (in, 0)))
      && (REG_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || MEM_P (XEXP (in, 1))))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `extract_insn' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      /* Note: this TEM (an rtx used for swapping) shadows the
	 function-scope rtx_insn *TEM above.  */
      rtx op0, op1, tem;
      rtx_insn *insn;
      enum insn_code code;

      /* Substitute any pending replacements into the addends first.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (REG_P (XEXP (in, 1))
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      insn = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (insn)
	return insn;

      /* If that failed, we must use a conservative two-insn sequence.

	 Use a move to copy one operand into the reload register.  Prefer
	 to reload a constant, MEM or pseudo since the move patterns can
	 handle an arbitrary operand.  If OP1 is not a constant, MEM or
	 pseudo and OP1 is not a valid operand for an add instruction, then
	 reload OP1.

	 After reloading one of the operands into the reload register, add
	 the reload register to the output register.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      code = optab_handler (add_optab, GET_MODE (out));

      if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
	  || (REG_P (op1)
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
	  || (code != CODE_FOR_nothing
	      && !insn_operand_matches (code, 2, op1)))
	tem = op0, op0 = op1, op1 = tem;

      /* Recursively reload the chosen operand into OUT.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn_if_valid_for_reload (gen_add2_insn (out, op1));
      if (insn)
	{
	  /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	  set_dst_reg_note (insn, REG_EQUIV, in, out);
	  return insn;
	}

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      gcc_assert (!reg_overlap_mentioned_p (out, op0));
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      set_dst_reg_note (insn, REG_EQUIV, in, out);
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  /* The comma expressions set TEM1/TEM2 to the underlying (possibly
     subreg-stripped) registers before the REG_P tests.  */
  else if ((tem1 = replaced_subreg (in), tem2 = replaced_subreg (out),
	    (REG_P (tem1) && REG_P (tem2)))
	   && REGNO (tem1) < FIRST_PSEUDO_REGISTER
	   && REGNO (tem2) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (tem1)),
				       REGNO_REG_CLASS (REGNO (tem2)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), reg_or_subregno (in));

      /* Move IN -> LOC -> OUT through the secondary memory.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif
  else if (REG_P (out) && UNARY_P (in))
    {
      rtx insn;
      rtx op1;
      rtx out_moded;
      rtx_insn *set;

      op1 = find_replacement (&XEXP (in, 0));
      if (op1 != XEXP (in, 0))
	in = gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in), op1);

      /* First, try a plain SET.  */
      set = emit_insn_if_valid_for_reload (gen_rtx_SET (VOIDmode, out, in));
      if (set)
	return set;

      /* If that failed, move the inner operand to the reload
	 register, and try the same unop with the inner expression
	 replaced with the reload register.  */

      /* OUT_MODED is OUT re-expressed in the operand's mode when the
	 modes differ, so the intermediate move is mode-consistent.  */
      if (GET_MODE (op1) != GET_MODE (out))
	out_moded = gen_rtx_REG (GET_MODE (op1), REGNO (out));
      else
	out_moded = out;

      gen_reload (out_moded, op1, opnum, type);

      insn
	= gen_rtx_SET (VOIDmode, out,
		       gen_rtx_fmt_e (GET_CODE (in), GET_MODE (in),
				      out_moded));
      insn = emit_insn_if_valid_for_reload (insn);
      if (insn)
	{
	  set_unique_reg_note (insn, REG_EQUIV, in);
	  return as_a <rtx_insn *> (insn);
	}

      fatal_insn ("failure trying to reload:", set);
    }
  /* If IN is a simple operand, use gen_move_insn.  */
  else if (OBJECT_P (in) || GET_CODE (in) == SUBREG)
    {
      tem = emit_insn (gen_move_insn (out, in));
      /* IN may contain a LABEL_REF, if so add a REG_LABEL_OPERAND note.  */
      mark_jump_label (in, tem, 0);
    }

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
8837 \f
/* Delete a previously made output-reload whose result we now believe
   is not needed.  First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.
   NEW_RELOAD_REG is reload register that reload J is using for REG.  */

static void
delete_output_reload (rtx_insn *insn, int j, int last_reload_reg,
		      rtx new_reload_reg)
{
  rtx_insn *output_reload_insn = spill_reg_store[last_reload_reg];
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  /* N_OCCURRENCES counts how many times REG (or its equivalents) appears
     in INSN; N_INHERITED counts how many of those are covered by
     inheritance.  Deleting is only safe if the former does not exceed
     the latter.  */
  int n_occurrences;
  int n_inherited = 0;
  rtx substed;
  unsigned regno;
  int nregs;

  /* It is possible that this reload has been only used to set another reload
     we eliminated earlier and thus deleted this instruction too.  */
  if (output_reload_insn->deleted ())
    return;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc (REGNO (reg));

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = rld[k].in;
      if (! reg2)
	continue;
      if (MEM_P (reg2) || reload_override_in[k])
	reg2 = rld[k].in_reg;
#ifdef AUTO_INC_DEC
      if (rld[k].out && ! rld[k].out_reg)
	reg2 = XEXP (rld[k].in_reg, 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    n_inherited++;
	  else
	    /* A non-inherited use of REG remains; deletion is unsafe.  */
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
  if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
    n_occurrences += count_occurrences (CALL_INSN_FUNCTION_USAGE (insn),
					reg, 0);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn),
					eliminate_regs (substed, VOIDmode,
							NULL_RTX), 0);
  for (rtx i1 = reg_equiv_alt_mem_list (REGNO (reg)); i1; i1 = XEXP (i1, 1))
    {
      gcc_assert (!rtx_equal_p (XEXP (i1, 0), substed));
      n_occurrences += count_occurrences (PATTERN (insn), XEXP (i1, 0), 0);
    }
  if (n_occurrences > n_inherited)
    return;

  regno = REGNO (reg);
  if (regno >= FIRST_PSEUDO_REGISTER)
    nregs = 1;
  else
    nregs = hard_regno_nregs[regno][GET_MODE (reg)];

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and we're within the same basic block, then the value can only
     pass through the reload reg and end up here.
     Otherwise, give up--return.  */
  for (rtx_insn *i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (i1))
	return;
      if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
	  && refers_to_regno_p (regno, regno + nregs, PATTERN (i1), NULL))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* We will be deleting the insn.  Remove the spill reg information.  */
  for (k = hard_regno_nregs[last_reload_reg][GET_MODE (reg)]; k-- > 0; )
    {
      spill_reg_store[last_reload_reg + k] = 0;
      spill_reg_stored_to[last_reload_reg + k] = 0;
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some
     inaccuracies in the debugging information are acceptable.
     So we could just delete output_reload_insn.  But in some cases
     we can improve the debugging information without sacrificing
     optimization - maybe even improving the code: See if the pseudo
     reg has been completely replaced with reload regs.  If so, delete
     the store insn and forget we had a stack slot for the pseudo.  */
  if (rld[j].out != rld[j].in
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_N_SETS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= NUM_FIXED_BLOCKS
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx_insn *i2;

      /* We know that it was used only between here and the beginning of
	 the current basic block.  (We also know that the last use before
	 INSN was the output reload we are thinking of deleting, but never
	 mind that.)  Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	  if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      delete_insn (output_reload_insn);
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  Note that this
	 loop also takes care of deleting output_reload_insn.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      delete_insn (i2);
	    }
	  if (LABEL_P (i2) || JUMP_P (i2))
	    break;
	}

      /* For the debugging info, say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (new_reload_reg);
      if (ira_conflicts_p)
	/* Inform IRA about the change.  */
	ira_mark_allocation_change (REGNO (reg));
      /* Release the pseudo's stack slot; it no longer lives in memory.  */
      alter_reg (REGNO (reg), -1, false);
    }
  else
    {
      /* Cannot prove the pseudo is fully replaced; just delete the one
	 output reload known to be dead (and its address reloads).  */
      delete_address_reloads (output_reload_insn, insn);
      delete_insn (output_reload_insn);
    }
}
9019
9020 /* We are going to delete DEAD_INSN. Recursively delete loads of
9021 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
9022 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
9023 static void
9024 delete_address_reloads (rtx_insn *dead_insn, rtx_insn *current_insn)
9025 {
9026 rtx set = single_set (dead_insn);
9027 rtx set2, dst;
9028 rtx_insn *prev, *next;
9029 if (set)
9030 {
9031 rtx dst = SET_DEST (set);
9032 if (MEM_P (dst))
9033 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
9034 }
9035 /* If we deleted the store from a reloaded post_{in,de}c expression,
9036 we can delete the matching adds. */
9037 prev = PREV_INSN (dead_insn);
9038 next = NEXT_INSN (dead_insn);
9039 if (! prev || ! next)
9040 return;
9041 set = single_set (next);
9042 set2 = single_set (prev);
9043 if (! set || ! set2
9044 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
9045 || !CONST_INT_P (XEXP (SET_SRC (set), 1))
9046 || !CONST_INT_P (XEXP (SET_SRC (set2), 1)))
9047 return;
9048 dst = SET_DEST (set);
9049 if (! rtx_equal_p (dst, SET_DEST (set2))
9050 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
9051 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
9052 || (INTVAL (XEXP (SET_SRC (set), 1))
9053 != -INTVAL (XEXP (SET_SRC (set2), 1))))
9054 return;
9055 delete_related_insns (prev);
9056 delete_related_insns (next);
9057 }
9058
/* Subfunction of delete_address_reloads: process registers found in X.
   X is (part of) an address used by DEAD_INSN, which is about to be
   deleted; CURRENT_INSN is the insn being reloaded.  If a spill register
   in X was loaded purely to feed DEAD_INSN, delete that load too
   (recursively, via its own source address).  */
static void
delete_address_reloads_1 (rtx_insn *dead_insn, rtx x, rtx_insn *current_insn)
{
  rtx_insn *prev, *i2;
  rtx set, dst;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* Non-REG rtx: recurse into all sub-expressions looking for REGs.  */
  if (code != REG)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill (reload) registers are candidates for deletion.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (!INSN_P (prev))
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means its load is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* Only consider insns emitted by this reload pass (UID check).  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (!REG_P (dst)
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (LABEL_P (i2))
	    break;
	  if (! INSN_P (i2))
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((rld[j].reg_rtx == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (rld[j].in && rld[j].reg_rtx == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (JUMP_P (i2))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  Note that even if CURRENT_INSN sets DST, we still
	     have to check the reloads.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((rld[j].reg_rtx == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	}
    }
  /* PREV's load is dead: first delete any loads feeding ITS source
     address, then invalidate the reload-reg contents and delete it.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  delete_insn (prev);
}
9167 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.  */

static void
inc_for_reload (rtx reloadreg, rtx in, rtx value, int inc_amount)
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = find_replacement (&XEXP (value, 0));
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC
	      || GET_CODE (value) == POST_MODIFY);
  rtx_insn *last;
  /* The increment amount as an rtx.  */
  rtx inc;
  rtx_insn *add_insn;
  int code;
  /* The actual location to copy the value from.  */
  rtx real_in = in == value ? incloc : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were nonzero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (REG_P (incloc))
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_MODIFY || GET_CODE (value) == POST_MODIFY)
    {
      /* {PRE,POST}_MODIFY carry the increment explicitly as the second
	 operand of a PLUS; use it rather than INC_AMOUNT.  */
      gcc_assert (GET_CODE (XEXP (value, 1)) == PLUS);
      inc = find_replacement (&XEXP (XEXP (value, 1), 1));
    }
  else
    {
      if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
	inc_amount = -inc_amount;

      inc = GEN_INT (inc_amount);
    }

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  extract_insn (add_insn);
	  /* Strict constraint check, since reload won't revisit this insn.  */
	  if (constrain_operands (1, get_enabled_alternatives (add_insn)))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));
	      return;
	    }
	}
      /* The direct increment was not valid; discard it and fall through
	 to the RELOADREG-based sequence below.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      if (CONST_INT_P (inc))
	emit_insn (gen_add2_insn (reloadreg,
				  gen_int_mode (-INTVAL (inc),
						GET_MODE (reloadreg))));
      else
	emit_insn (gen_sub2_insn (reloadreg, inc));
    }
}
9276 \f
#ifdef AUTO_INC_DEC
/* Walk the rtl expression X belonging to INSN and attach a REG_INC note
   to INSN for each auto-increment/auto-decrement address found.  */
static void
add_auto_inc_notes (rtx_insn *insn, rtx x)
{
  enum rtx_code code = GET_CODE (x);

  /* A MEM whose address is an autoinc expression: note the register
     being modified and stop descending here.  */
  if (code == MEM && auto_inc_p (XEXP (x, 0)))
    {
      add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
      return;
    }

  /* Otherwise recurse into every rtx operand and every vector element,
     scanning from the last operand down (same order as before, so the
     resulting note list is unchanged).  */
  const char *fmt = GET_RTX_FORMAT (code);
  for (int opno = GET_RTX_LENGTH (code) - 1; opno >= 0; opno--)
    {
      if (fmt[opno] == 'e')
	add_auto_inc_notes (insn, XEXP (x, opno));
      else if (fmt[opno] == 'E')
	for (int elt = XVECLEN (x, opno) - 1; elt >= 0; elt--)
	  add_auto_inc_notes (insn, XVECEXP (x, opno, elt));
    }
}
#endif