08a9dd06 1/* Reload pseudo regs into hard regs for insns that require hard regs.
f4ac4e51 2 Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.
08a9dd06 3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
a6c145ef 18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
08a9dd06 20
21
22#include "config.h"
405711de 23#include "system.h"
a986b5e0 24
25#include "machmode.h"
26#include "hard-reg-set.h"
08a9dd06 27#include "rtl.h"
28#include "obstack.h"
29#include "insn-config.h"
30#include "insn-flags.h"
31#include "insn-codes.h"
32#include "flags.h"
33#include "expr.h"
34#include "regs.h"
e94a59c6 35#include "basic-block.h"
08a9dd06 36#include "reload.h"
37#include "recog.h"
08a9dd06 38#include "output.h"
2a9ed571 39#include "real.h"
12874aaf 40#include "toplev.h"
08a9dd06 41
42/* This file contains the reload pass of the compiler, which is
43 run after register allocation has been done. It checks that
44 each insn is valid (operands required to be in registers really
45 are in registers of the proper class) and fixes up invalid ones
46 by copying values temporarily into registers for the insns
47 that need them.
48
49 The results of register allocation are described by the vector
50 reg_renumber; the insns still contain pseudo regs, but reg_renumber
51 can be used to find which hard reg, if any, a pseudo reg is in.
52
53 The technique we always use is to free up a few hard regs that are
54 called ``reload regs'', and for each place where a pseudo reg
55 must be in a hard reg, copy it temporarily into one of the reload regs.
56
dab171c5 57 Reload regs are allocated locally for every instruction that needs
58 reloads. When there are pseudos which are allocated to a register that
59 has been chosen as a reload reg, such pseudos must be ``spilled''.
60 This means that they go to other hard regs, or to stack slots if no other
08a9dd06 61 available hard regs can be found. Spilling can invalidate more
62 insns, requiring additional need for reloads, so we must keep checking
63 until the process stabilizes.
64
65 For machines with different classes of registers, we must keep track
66 of the register class needed for each reload, and make sure that
67 we allocate enough reload registers of each class.
68
69 The file reload.c contains the code that checks one insn for
70 validity and reports the reloads that it needs. This file
71 is in charge of scanning the entire rtl code, accumulating the
72 reload needs, spilling, assigning reload registers to use for
73 fixing up each insn, and generating the new insns to copy values
74 into the reload registers. */
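/* Editor's sketch (not part of the original source): a simplified view of
   the fixed-point driver that reload () below implements.  The real loop
   also copes with caller-save insns, frame growth and elimination
   bookkeeping; this only shows the shape of the iteration described
   above, using names defined later in this file.  */
#if 0
  for (;;)
    {
      int something_changed = 0;
      struct insn_chain *chain;

      set_initial_elim_offsets ();
      calculate_needs_all_insns (global);

      /* Pick reload registers for every insn that needs them; this may
         force more pseudos onto the stack (spilling).  */
      for (chain = insns_need_reload; chain != 0;
           chain = chain->next_need_reload)
        find_reload_regs (chain, dumpfile);

      something_changed |= finish_spills (global, dumpfile);
      if (! something_changed)
        break;                          /* The process has stabilized.  */
    }
  reload_as_needed (global);            /* Emit the actual reload insns.  */
#endif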
81d0fbb3 75
76
77#ifndef REGISTER_MOVE_COST
78#define REGISTER_MOVE_COST(x, y) 2
79#endif
08a9dd06 80\f
81/* During reload_as_needed, element N contains a REG rtx for the hard reg
a92771b8 82 into which reg N has been reloaded (perhaps for a previous insn). */
08a9dd06 83static rtx *reg_last_reload_reg;
84
85/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87static char *reg_has_output_reload;
88
89/* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91static HARD_REG_SET reg_is_output_reload;
92
93/* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97rtx *reg_equiv_constant;
98
99/* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
c548e550 103rtx *reg_equiv_memory_loc;
08a9dd06 104
105/* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108rtx *reg_equiv_address;
109
110/* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112rtx *reg_equiv_mem;
113
114/* Widest width in which each pseudo reg is referred to (via subreg). */
115static int *reg_max_ref_width;
116
537e8d9c 117/* Element N is the list of insns that initialized reg N from its equivalent
08a9dd06 118 constant or memory slot. */
119static rtx *reg_equiv_init;
120
dab171c5 121/* Vector to remember old contents of reg_renumber before spilling. */
122static short *reg_old_renumber;
123
a5e95c30 124/* During reload_as_needed, element N contains the last pseudo regno reloaded
dab171c5 125 into hard register N. If that pseudo reg occupied more than one register,
08a9dd06 126 reg_reloaded_contents points to that pseudo for each spill register in
127 use; all of these must remain set for an inheritance to occur. */
128static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
129
130/* During reload_as_needed, element N contains the insn for which
a5e95c30 131 hard register N was last used. Its contents are significant only
132 when reg_reloaded_valid is set for this register. */
08a9dd06 133static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134
a5e95c30 135/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
136static HARD_REG_SET reg_reloaded_valid;
137/* Indicate if the register was dead at the end of the reload.
138 This is only valid if reg_reloaded_contents is set and valid. */
139static HARD_REG_SET reg_reloaded_dead;
140
08a9dd06 141/* Number of spill-regs so far; number of valid elements of spill_regs. */
142static int n_spills;
143
144/* In parallel with spill_regs, contains REG rtx's for those regs.
145 Holds the last rtx used for any given reg, or 0 if it has never
146 been used for spilling yet. This rtx is reused, provided it has
147 the proper mode. */
148static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
149
150/* In parallel with spill_regs, contains nonzero for a spill reg
151 that was stored after the last time it was used.
152 The precise value is the insn generated to do the store. */
153static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
154
93c7b06e 155/* This is the register that was stored with spill_reg_store. This is a
156 copy of reload_out / reload_out_reg when the value was stored; if
157 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
158static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
159
08a9dd06 160/* This table is the inverse mapping of spill_regs:
161 indexed by hard reg number,
162 it contains the position of that reg in spill_regs,
e55026a1 163 or -1 for something that is not in spill_regs.
164
165 ?!? This is no longer accurate. */
08a9dd06 166static short spill_reg_order[FIRST_PSEUDO_REGISTER];
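/* Editor's note (not part of the original source): the intended relation
   between spill_reg_order and spill_regs is that the former is the inverse
   of the latter, i.e. roughly the check sketched below -- subject to the
   "no longer accurate" caveat in the comment above.  */
#if 0
  for (i = 0; i < n_spills; i++)
    if (spill_reg_order[spill_regs[i]] != i)
      abort ();
#endif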
167
dab171c5 168/* This reg set indicates registers that can't be used as spill registers for
169 the currently processed insn. These are the hard registers which are live
170 during the insn, but not allocated to pseudos, as well as fixed
171 registers. */
08a9dd06 172static HARD_REG_SET bad_spill_regs;
173
dab171c5 174/* These are the hard registers that can't be used as spill register for any
175 insn. This includes registers used for user variables and registers that
176 we can't eliminate. A register that appears in this set also can't be used
177 to retry register allocation. */
178static HARD_REG_SET bad_spill_regs_global;
179
08a9dd06 180/* Describes order of use of registers for reloading
dab171c5 181 of spilled pseudo-registers. `n_spills' is the number of
182 elements that are actually valid; new ones are added at the end.
183
184 Both spill_regs and spill_reg_order are used on two occasions:
185 once during find_reload_regs, where they keep track of the spill registers
186 for a single insn, but also during reload_as_needed where they show all
187 the registers ever used by reload. For the latter case, the information
188 is calculated during finish_spills. */
08a9dd06 189static short spill_regs[FIRST_PSEUDO_REGISTER];
190
dab171c5 191/* This vector of reg sets indicates, for each pseudo, which hard registers
192 may not be used for retrying global allocation because the register was
193 formerly spilled from one of them. If we allowed reallocating a pseudo to
194 a register that it was already allocated to, reload might not
195 terminate. */
196static HARD_REG_SET *pseudo_previous_regs;
197
198/* This vector of reg sets indicates, for each pseudo, which hard
199 registers may not be used for retrying global allocation because they
200 are used as spill registers during one of the insns in which the
201 pseudo is live. */
202static HARD_REG_SET *pseudo_forbidden_regs;
203
204/* All hard regs that have been used as spill registers for any insn are
205 marked in this set. */
206static HARD_REG_SET used_spill_regs;
ed69f33f 207
de261207 208/* Index of last register assigned as a spill register. We allocate in
209 a round-robin fashion. */
de261207 210static int last_spill_reg;
211
08a9dd06 212/* Describes order of preference for putting regs into spill_regs.
213 Contains the numbers of all the hard regs, in order most preferred first.
214 This order is different for each function.
215 It is set up by order_regs_for_reload.
216 Empty elements at the end contain -1. */
217static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
218
08a9dd06 219/* Nonzero if indirect addressing is supported on the machine; this means
220 that spilling (REG n) does not require reloading it into a register in
221 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
222 value indicates the level of indirect addressing supported, e.g., two
223 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
224 a hard register. */
08a9dd06 225static char spill_indirect_levels;
226
227/* Nonzero if indirect addressing is supported when the innermost MEM is
228 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
229 which these are valid is the same as spill_indirect_levels, above. */
08a9dd06 230char indirect_symref_ok;
231
232/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
08a9dd06 233char double_reg_address_ok;
234
235/* Record the stack slot for each spilled hard register. */
08a9dd06 236static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
237
238/* Width allocated so far for that stack slot. */
08a9dd06 239static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240
590ec786 241/* Record which pseudos needed to be spilled. */
242static regset spilled_pseudos;
243
08a9dd06 244/* First uid used by insns created by reload in this function.
245 Used in find_equiv_reg. */
246int reload_first_uid;
247
248/* Flag set by local-alloc or global-alloc if anything is live in
249 a call-clobbered reg across calls. */
08a9dd06 250int caller_save_needed;
251
252/* Set to 1 while reload_as_needed is operating.
253 Required by some machines to handle any generated moves differently. */
08a9dd06 254int reload_in_progress = 0;
255
256/* These arrays record the insn_code of insns that may be needed to
257 perform input and output reloads of special objects. They provide a
258 place to pass a scratch register. */
08a9dd06 259enum insn_code reload_in_optab[NUM_MACHINE_MODES];
260enum insn_code reload_out_optab[NUM_MACHINE_MODES];
261
f9e15121 262/* This obstack is used for allocation of rtl during register elimination.
08a9dd06 263 The allocated storage can be freed once find_reloads has processed the
264 insn. */
08a9dd06 265struct obstack reload_obstack;
e94a59c6 266
267/* Points to the beginning of the reload_obstack. All insn_chain structures
268 are allocated first. */
269char *reload_startobj;
270
271/* The point after all insn_chain structures. Used to quickly deallocate
272 memory used while processing one insn. */
08a9dd06 273char *reload_firstobj;
274
275#define obstack_chunk_alloc xmalloc
276#define obstack_chunk_free free
277
08a9dd06 278/* List of labels that must never be deleted. */
279extern rtx forced_labels;
e94a59c6 280
281/* List of insn_chain instructions, one for every insn that reload needs to
282 examine. */
283struct insn_chain *reload_insn_chain;
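/* Editor's sketch (not part of the original source): passes over the
   function typically walk this list rather than the raw insn stream,
   following the idiom used by maybe_fix_stack_asms further down.  */
#if 0
  struct insn_chain *chain;
  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    if (chain->need_reload)
      /* ... process chain->insn ... */ ;
#endif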
590ec786 284
fd311d44 285#ifdef TREE_CODE
286extern tree current_function_decl;
287#else
7e159545 288extern union tree_node *current_function_decl;
fd311d44 289#endif
290
dab171c5 291/* List of all insns needing reloads. */
590ec786 292static struct insn_chain *insns_need_reload;
08a9dd06 293\f
294/* This structure is used to record information about register eliminations.
295 Each array entry describes one possible way of eliminating a register
296 in favor of another. If there is more than one way of eliminating a
297 particular register, the most preferred should be specified first. */
298
911d0ac2 299struct elim_table
08a9dd06 300{
a92771b8 301 int from; /* Register number to be eliminated. */
302 int to; /* Register number used as replacement. */
303 int initial_offset; /* Initial difference between values. */
304 int can_eliminate; /* Non-zero if this elimination can be done. */
08a9dd06 305 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
a92771b8 306 insns made by reload. */
307 int offset; /* Current offset between the two regs. */
a92771b8 308 int previous_offset; /* Offset at end of previous insn. */
309 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
08a9dd06 310 rtx from_rtx; /* REG rtx for the register to be eliminated.
311 We cannot simply compare the number since
312 we might then spuriously replace a hard
313 register corresponding to a pseudo
a92771b8 314 assigned to the reg to be eliminated. */
315 rtx to_rtx; /* REG rtx for the replacement. */
911d0ac2 316};
317
318static struct elim_table * reg_eliminate = 0;
319
320/* This is an intermediate structure to initialize the table. It has
321 exactly the members provided by ELIMINABLE_REGS. */
322static struct elim_table_1
323{
324 int from;
325 int to;
326} reg_eliminate_1[] =
08a9dd06 327
328/* If a set of eliminable registers was specified, define the table from it.
329 Otherwise, default to the normal case of the frame pointer being
330 replaced by the stack pointer. */
331
332#ifdef ELIMINABLE_REGS
333 ELIMINABLE_REGS;
334#else
335 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
336#endif
337
911d0ac2 338#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
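/* Editor's illustration (not part of the original source): a target that
   can replace both the arg pointer and the frame pointer might provide
   ELIMINABLE_REGS along these lines in its target header; each pair names
   a "from" register and the register it may be eliminated in favor of,
   most preferred pair first.  */
#if 0
#define ELIMINABLE_REGS					\
{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },	\
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}
#endif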
08a9dd06 339
340/* Record the number of pending eliminations that have an offset not equal
341 to their initial offset. If non-zero, we use a new copy of each
342 replacement result in any insns encountered. */
93c7b06e 343int num_not_at_initial_offset;
08a9dd06 344
345/* Count the number of registers that we may be able to eliminate. */
346static int num_eliminable;
aa8d28af 347/* And the number of registers that are equivalent to a constant that
348 can be eliminated to frame_pointer / arg_pointer + constant. */
349static int num_eliminable_invariants;
08a9dd06 350
351/* For each label, we record the offset of each elimination. If we reach
352 a label by more than one path and an offset differs, we cannot do the
353 elimination. This information is indexed by the number of the label.
354 The first table is an array of flags that records whether we have yet
355 encountered a label and the second table is an array of arrays, one
356 entry in the latter array for each elimination. */
357
358static char *offsets_known_at;
359static int (*offsets_at)[NUM_ELIMINABLE_REGS];
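/* Editor's sketch (not part of the original source): the two tables are
   consulted roughly like this when an insn that mentions a label is
   processed; LABEL, OFFSET and ELIM_INDEX are placeholder names, and
   CODE_LABEL_NUMBER indexes both tables after the bias applied when they
   are allocated in reload () below.  */
#if 0
  if (offsets_known_at[CODE_LABEL_NUMBER (label)])
    offset = offsets_at[CODE_LABEL_NUMBER (label)][elim_index];
#endif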
360
361/* Number of labels in the current function. */
362
363static int num_labels;
81d0fbb3 364
dab171c5 365struct hard_reg_n_uses
366{
367 int regno;
368 unsigned int uses;
369};
08a9dd06 370\f
faf8dbee 371static void maybe_fix_stack_asms PROTO((void));
dab171c5 372static void calculate_needs_all_insns PROTO((int));
373static void calculate_needs PROTO((struct insn_chain *));
374static void find_reload_regs PROTO((struct insn_chain *chain,
375 FILE *));
376static void find_tworeg_group PROTO((struct insn_chain *, int,
377 FILE *));
378static void find_group PROTO((struct insn_chain *, int,
379 FILE *));
380static int possible_group_p PROTO((struct insn_chain *, int));
381static void count_possible_groups PROTO((struct insn_chain *, int));
81d0fbb3 382static int modes_equiv_for_class_p PROTO((enum machine_mode,
383 enum machine_mode,
384 enum reg_class));
590ec786 385static void delete_caller_save_insns PROTO((void));
dab171c5 386
81d0fbb3 387static void spill_failure PROTO((rtx));
dab171c5 388static void new_spill_reg PROTO((struct insn_chain *, int, int,
389 int, FILE *));
390static void maybe_mark_pseudo_spilled PROTO((int));
81d0fbb3 391static void delete_dead_insn PROTO((rtx));
392static void alter_reg PROTO((int, int));
393static void set_label_offsets PROTO((rtx, rtx, int));
394static int eliminate_regs_in_insn PROTO((rtx, int));
93c7b06e 395static void update_eliminable_offsets PROTO((void));
81d0fbb3 396static void mark_not_eliminable PROTO((rtx, rtx));
9d24e570 397static void set_initial_elim_offsets PROTO((void));
8ad9ded8 398static void verify_initial_elim_offsets PROTO((void));
f87f6d5d 399static void set_initial_label_offsets PROTO((void));
400static void set_offsets_for_label PROTO((rtx));
9d24e570 401static void init_elim_table PROTO((void));
402static void update_eliminables PROTO((HARD_REG_SET *));
dab171c5 403static void spill_hard_reg PROTO((int, FILE *, int));
404static int finish_spills PROTO((int, FILE *));
405static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
81d0fbb3 406static void scan_paradoxical_subregs PROTO((rtx));
04b08097 407static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
dab171c5 408static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
409static void order_regs_for_reload PROTO((struct insn_chain *));
590ec786 410static void reload_as_needed PROTO((int));
b990dc72 411static void forget_old_reloads_1 PROTO((rtx, rtx));
04b08097 412static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
81d0fbb3 413static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
414 enum machine_mode));
7ac01373 415static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
416 enum machine_mode));
81d0fbb3 417static int reload_reg_free_p PROTO((int, int, enum reload_type));
c6bdbe11 418static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int, int));
81d0fbb3 419static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
dab171c5 420static int allocate_reload_reg PROTO((struct insn_chain *, int, int,
421 int));
422static void choose_reload_regs PROTO((struct insn_chain *));
81d0fbb3 423static void merge_assigned_reloads PROTO((rtx));
590ec786 424static void emit_reload_insns PROTO((struct insn_chain *));
93c7b06e 425static void delete_output_reload PROTO((rtx, int, int));
426static void delete_address_reloads PROTO((rtx, rtx));
427static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
428static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
a8482e91 429static int constraint_accepts_reg_p PROTO((const char *, rtx));
11f22bbf 430static void reload_cse_regs_1 PROTO((rtx));
5e98e63d 431static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
63c52f6e 432static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
5e98e63d 433static void reload_cse_invalidate_mem PROTO((rtx));
434static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
5e98e63d 435static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
0b399586 436static int reload_cse_noop_set_p PROTO((rtx, rtx));
0dbd1c74 437static int reload_cse_simplify_set PROTO((rtx, rtx));
438static int reload_cse_simplify_operands PROTO((rtx));
5e98e63d 439static void reload_cse_check_clobber PROTO((rtx, rtx));
440static void reload_cse_record_set PROTO((rtx, rtx));
11f22bbf 441static void reload_combine PROTO((void));
442static void reload_combine_note_use PROTO((rtx *, rtx));
443static void reload_combine_note_store PROTO((rtx, rtx));
444static void reload_cse_move2add PROTO((rtx));
445static void move2add_note_store PROTO((rtx, rtx));
08a9dd06 446\f
81d0fbb3 447/* Initialize the reload pass once per compilation. */
448
08a9dd06 449void
450init_reload ()
451{
452 register int i;
453
454 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
455 Set spill_indirect_levels to the number of levels such addressing is
456 permitted, zero if it is not permitted at all. */
457
458 register rtx tem
941522d6 459 = gen_rtx_MEM (Pmode,
460 gen_rtx_PLUS (Pmode,
461 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
462 GEN_INT (4)));
08a9dd06 463 spill_indirect_levels = 0;
464
465 while (memory_address_p (QImode, tem))
466 {
467 spill_indirect_levels++;
941522d6 468 tem = gen_rtx_MEM (Pmode, tem);
08a9dd06 469 }
470
471 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
472
941522d6 473 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
08a9dd06 474 indirect_symref_ok = memory_address_p (QImode, tem);
475
476 /* See if reg+reg is a valid (and offsettable) address. */
477
b19bcbd6 478 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
26717e42 479 {
941522d6 480 tem = gen_rtx_PLUS (Pmode,
481 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
482 gen_rtx_REG (Pmode, i));
26717e42 483 /* This way, we make sure that reg+reg is an offsettable address. */
484 tem = plus_constant (tem, 4);
485
486 if (memory_address_p (QImode, tem))
487 {
488 double_reg_address_ok = 1;
489 break;
490 }
491 }
08a9dd06 492
a92771b8 493 /* Initialize obstack for our rtl allocation. */
08a9dd06 494 gcc_obstack_init (&reload_obstack);
e94a59c6 495 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
08a9dd06 496}
497
e94a59c6 498/* List of insn chains that are currently unused. */
499static struct insn_chain *unused_insn_chains = 0;
500
501/* Allocate an empty insn_chain structure. */
502struct insn_chain *
503new_insn_chain ()
504{
505 struct insn_chain *c;
506
507 if (unused_insn_chains == 0)
508 {
509 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
510 c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
511 c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
512 }
513 else
514 {
515 c = unused_insn_chains;
516 unused_insn_chains = c->next;
517 }
518 c->is_caller_save_insn = 0;
dab171c5 519 c->need_operand_change = 0;
e94a59c6 520 c->need_reload = 0;
521 c->need_elim = 0;
522 return c;
523}
524
590ec786 525/* Small utility function to set all regs in hard reg set TO which are
526 allocated to pseudos in regset FROM. */
527void
528compute_use_by_pseudos (to, from)
529 HARD_REG_SET *to;
530 regset from;
531{
532 int regno;
533 EXECUTE_IF_SET_IN_REG_SET
534 (from, FIRST_PSEUDO_REGISTER, regno,
535 {
536 int r = reg_renumber[regno];
537 int nregs;
538 if (r < 0)
22c48241 539 {
540 /* reload_combine uses the information from
541 basic_block_live_at_start, which might still contain registers
542 that have not actually been allocated since they have an
543 equivalence. */
544 if (! reload_completed)
545 abort ();
546 }
547 else
548 {
549 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
550 while (nregs-- > 0)
551 SET_HARD_REG_BIT (*to, r + nregs);
552 }
590ec786 553 });
554}
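/* Editor's sketch (not part of the original source): a typical use of the
   function above is to fold the hard registers occupied by the pseudos
   live around an insn into a HARD_REG_SET of registers that must not be
   chosen for spilling, e.g.:  */
#if 0
  compute_use_by_pseudos (&bad_spill_regs, chain->live_before);
#endif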
dab171c5 555\f
1aacee06 556/* Global variables used by reload and its subroutines. */
557
1aacee06 558/* Set during calculate_needs if an insn needs register elimination. */
559static int something_needs_elimination;
93c7b06e 560/* Set during calculate_needs if an insn needs an operand changed. */
561int something_needs_operands_changed;
1aacee06 562
1aacee06 563/* Nonzero means we couldn't get enough spill regs. */
564static int failure;
565
81d0fbb3 566/* Main entry point for the reload pass.
08a9dd06 567
568 FIRST is the first insn of the function being compiled.
569
570 GLOBAL nonzero means we were called from global_alloc
571 and should attempt to reallocate any pseudoregs that we
572 displace from hard regs we will use for reloads.
573 If GLOBAL is zero, we do not have enough information to do that,
574 so any pseudo reg that is spilled must go to the stack.
575
576 DUMPFILE is the global-reg debugging dump file stream, or 0.
577 If it is nonzero, messages are written to it to describe
578 which registers are seized as reload regs, which pseudo regs
2e756f7d 579 are spilled from them, and where the pseudo regs are reallocated to.
08a9dd06 580
2e756f7d 581 Return value is nonzero if reload failed
582 and we must not do any more for this function. */
583
584int
08a9dd06 585reload (first, global, dumpfile)
586 rtx first;
587 int global;
588 FILE *dumpfile;
589{
dab171c5 590 register int i;
08a9dd06 591 register rtx insn;
592 register struct elim_table *ep;
593
6dd870f4 594 /* The two pointers used to track the true location of the memory used
595 for label offsets. */
596 char *real_known_ptr = NULL_PTR;
597 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
598
08a9dd06 599 /* Make sure even insns with volatile mem refs are recognizable. */
600 init_recog ();
601
1aacee06 602 failure = 0;
603
e94a59c6 604 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
605
e546118a 606 /* Make sure that the last insn in the chain
607 is not something that needs reloading. */
608 emit_note (NULL_PTR, NOTE_INSN_DELETED);
609
08a9dd06 610 /* Enable find_equiv_reg to distinguish insns made by reload. */
611 reload_first_uid = get_max_uid ();
612
c538053c 613#ifdef SECONDARY_MEMORY_NEEDED
614 /* Initialize the secondary memory table. */
615 clear_secondary_mem ();
616#endif
617
08a9dd06 618 /* We don't have a stack slot for any spill reg yet. */
748e6d74 619 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
620 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
08a9dd06 621
ca9902fc 622 /* Initialize the save area information for caller-save, in case some
623 are needed. */
624 init_save_areas ();
c8ad158d 625
08a9dd06 626 /* Compute which hard registers are now in use
627 as homes for pseudo registers.
628 This is done here rather than (eg) in global_alloc
629 because this point is reached even if not optimizing. */
08a9dd06 630 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
631 mark_home_live (i);
632
f668952a 633 /* A function that receives a nonlocal goto must save all call-saved
634 registers. */
635 if (current_function_has_nonlocal_label)
636 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
637 {
638 if (! call_used_regs[i] && ! fixed_regs[i])
639 regs_ever_live[i] = 1;
640 }
641
08a9dd06 642 /* Find all the pseudo registers that didn't get hard regs
643 but do have known equivalent constants or memory slots.
644 These include parameters (known equivalent to parameter slots)
645 and cse'd or loop-moved constant memory addresses.
646
647 Record constant equivalents in reg_equiv_constant
648 so they will be substituted by find_reloads.
649 Record memory equivalents in reg_mem_equiv so they can
650 be substituted eventually by altering the REG-rtx's. */
651
829c0ce9 652 reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx));
748e6d74 653 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
829c0ce9 654 reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx));
748e6d74 655 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
829c0ce9 656 reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx));
748e6d74 657 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
829c0ce9 658 reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx));
748e6d74 659 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
829c0ce9 660 reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx));
748e6d74 661 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
829c0ce9 662 reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int));
748e6d74 663 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
dab171c5 664 reg_old_renumber = (short *) xmalloc (max_regno * sizeof (short));
1b988e85 665 bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short));
dab171c5 666 pseudo_forbidden_regs
667 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
668 pseudo_previous_regs
669 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
08a9dd06 670
dab171c5 671 CLEAR_HARD_REG_SET (bad_spill_regs_global);
672 bzero ((char *) pseudo_previous_regs, max_regno * sizeof (HARD_REG_SET));
e715e2a3 673
08a9dd06 674 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
e715e2a3 675 Also find all paradoxical subregs and find largest such for each pseudo.
676 On machines with small register classes, record hard registers that
c4d9ce40 677 are used for user variables. These can never be used for spills.
678 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
679 caller-saved registers must be marked live. */
08a9dd06 680
aa8d28af 681 num_eliminable_invariants = 0;
08a9dd06 682 for (insn = first; insn; insn = NEXT_INSN (insn))
683 {
684 rtx set = single_set (insn);
685
c4d9ce40 686 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
687 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
689 if (! call_used_regs[i])
690 regs_ever_live[i] = 1;
691
08a9dd06 692 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
693 {
e5fdd564 694 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
ca9902fc 695 if (note
696#ifdef LEGITIMATE_PIC_OPERAND_P
aa8d28af 697 && (! function_invariant_p (XEXP (note, 0))
698 || ! flag_pic
ca9902fc 699 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
700#endif
701 )
08a9dd06 702 {
703 rtx x = XEXP (note, 0);
704 i = REGNO (SET_DEST (set));
705 if (i > LAST_VIRTUAL_REGISTER)
706 {
707 if (GET_CODE (x) == MEM)
ad87de1e 708 {
709 /* If the operand is a PLUS, the MEM may be shared,
710 so make sure we have an unshared copy here. */
711 if (GET_CODE (XEXP (x, 0)) == PLUS)
712 x = copy_rtx (x);
713
714 reg_equiv_memory_loc[i] = x;
715 }
aa8d28af 716 else if (function_invariant_p (x))
08a9dd06 717 {
aa8d28af 718 if (GET_CODE (x) == PLUS)
719 {
720 /* This is PLUS of frame pointer and a constant,
721 and might be shared. Unshare it. */
722 reg_equiv_constant[i] = copy_rtx (x);
723 num_eliminable_invariants++;
724 }
725 else if (x == frame_pointer_rtx
726 || x == arg_pointer_rtx)
727 {
728 reg_equiv_constant[i] = x;
729 num_eliminable_invariants++;
730 }
731 else if (LEGITIMATE_CONSTANT_P (x))
08a9dd06 732 reg_equiv_constant[i] = x;
733 else
734 reg_equiv_memory_loc[i]
497de2d4 735 = force_const_mem (GET_MODE (SET_DEST (set)), x);
08a9dd06 736 }
737 else
738 continue;
739
740 /* If this register is being made equivalent to a MEM
741 and the MEM is not SET_SRC, the equivalencing insn
742 is one with the MEM as a SET_DEST and it occurs later.
743 So don't mark this insn now. */
744 if (GET_CODE (x) != MEM
745 || rtx_equal_p (SET_SRC (set), x))
537e8d9c 746 reg_equiv_init[i]
747 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
08a9dd06 748 }
749 }
750 }
751
752 /* If this insn is setting a MEM from a register equivalent to it,
753 this is the equivalencing insn. */
754 else if (set && GET_CODE (SET_DEST (set)) == MEM
755 && GET_CODE (SET_SRC (set)) == REG
756 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
757 && rtx_equal_p (SET_DEST (set),
758 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
537e8d9c 759 reg_equiv_init[REGNO (SET_SRC (set))]
760 = gen_rtx_INSN_LIST (VOIDmode, insn,
761 reg_equiv_init[REGNO (SET_SRC (set))]);
08a9dd06 762
763 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
764 scan_paradoxical_subregs (PATTERN (insn));
765 }
766
9d24e570 767 init_elim_table ();
08a9dd06 768
769 num_labels = max_label_num () - get_first_label_num ();
770
771 /* Allocate the tables used to store offset information at labels. */
6dd870f4 772 /* We used to use alloca here, but the size of what it would try to
773 allocate would occasionally cause it to exceed the stack limit and
774 cause a core dump. */
775 real_known_ptr = xmalloc (num_labels);
776 real_at_ptr
08a9dd06 777 = (int (*)[NUM_ELIMINABLE_REGS])
6dd870f4 778 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
08a9dd06 779
6dd870f4 780 offsets_known_at = real_known_ptr - get_first_label_num ();
781 offsets_at
782 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
08a9dd06 783
784 /* Alter each pseudo-reg rtx to contain its hard reg number.
785 Assign stack slots to the pseudos that lack hard regs or equivalents.
786 Do not touch virtual registers. */
787
788 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
789 alter_reg (i, -1);
790
08a9dd06 791 /* If we have some registers we think can be eliminated, scan all insns to
792 see if there is an insn that sets one of these registers to something
793 other than itself plus a constant. If so, the register cannot be
794 eliminated. Doing this scan here eliminates an extra pass through the
795 main reload loop in the most common case where register elimination
796 cannot be done. */
797 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
798 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
799 || GET_CODE (insn) == CALL_INSN)
800 note_stores (PATTERN (insn), mark_not_eliminable);
801
802#ifndef REGISTER_CONSTRAINTS
803 /* If all the pseudo regs have hard regs,
804 except for those that are never referenced,
805 we know that no reloads are needed. */
806 /* But that is not true if there are register constraints, since
807 in that case some pseudos might be in the wrong kind of hard reg. */
808
809 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
394685a4 810 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
08a9dd06 811 break;
812
9182a5cf 813 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
6dd870f4 814 {
815 free (real_known_ptr);
816 free (real_at_ptr);
829c0ce9 817 free (reg_equiv_constant);
818 free (reg_equiv_memory_loc);
819 free (reg_equiv_mem);
820 free (reg_equiv_init);
821 free (reg_equiv_address);
822 free (reg_max_ref_width);
dab171c5 823 free (reg_old_renumber);
824 free (pseudo_previous_regs);
825 free (pseudo_forbidden_regs);
829c0ce9 826 return 0;
6dd870f4 827 }
08a9dd06 828#endif
829
faf8dbee 830 maybe_fix_stack_asms ();
831
dab171c5 832 insns_need_reload = 0;
833 something_needs_elimination = 0;
834
de261207 835 /* Initialize to -1, which means take the first spill register. */
836 last_spill_reg = -1;
837
590ec786 838 spilled_pseudos = ALLOCA_REG_SET ();
839
08a9dd06 840 /* Spill any hard regs that we know we can't eliminate. */
dab171c5 841 CLEAR_HARD_REG_SET (used_spill_regs);
08a9dd06 842 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
843 if (! ep->can_eliminate)
dab171c5 844 spill_hard_reg (ep->from, dumpfile, 1);
0394c2e6 845
846#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
847 if (frame_pointer_needed)
dab171c5 848 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
0394c2e6 849#endif
590ec786 850 finish_spills (global, dumpfile);
851
9e519b97 852 /* From now on, we may need to generate moves differently. We may also
853 allow modifications of insns which cause them to not be recognized.
854 Any such modifications will be cleaned up during reload itself. */
c1668c09 855 reload_in_progress = 1;
856
08a9dd06 857 /* This loop scans the entire function each go-round
858 and repeats until one repetition spills no additional hard regs. */
dab171c5 859 for (;;)
08a9dd06 860 {
dab171c5 861 int something_changed;
862 int did_spill;
863 struct insn_chain *chain;
08a9dd06 864
dab171c5 865 HOST_WIDE_INT starting_frame_size;
08a9dd06 866
2debb316 867 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
868 here because the stack size may be a part of the offset computation
869 for register elimination, and there might have been new stack slots
870 created in the last iteration of this loop. */
871 assign_stack_local (BLKmode, 0, 0);
872
873 starting_frame_size = get_frame_size ();
874
9d24e570 875 set_initial_elim_offsets ();
f87f6d5d 876 set_initial_label_offsets ();
dab171c5 877
08a9dd06 878 /* For each pseudo register that has an equivalent location defined,
879 try to eliminate any eliminable registers (such as the frame pointer)
880 assuming initial offsets for the replacement register, which
881 is the normal case.
882
883 If the resulting location is directly addressable, substitute
884 the MEM we just got directly for the old REG.
885
886 If it is not addressable but is a constant or the sum of a hard reg
887 and constant, it is probably not addressable because the constant is
888 out of range, in that case record the address; we will generate
889 hairy code to compute the address in a register each time it is
37690010 890 needed. Similarly if it is a hard register, but one that is not
891 valid as an address register.
08a9dd06 892
893 If the location is not addressable, but does not have one of the
894 above forms, assign a stack slot. We have to do this to avoid the
895 potential of producing lots of reloads if, e.g., a location involves
896 a pseudo that didn't get a hard register and has an equivalent memory
897 location that also involves a pseudo that didn't get a hard register.
898
899 Perhaps at some point we will improve reload_when_needed handling
900 so this problem goes away. But that's very hairy. */
901
902 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
903 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
904 {
6182a80f 905 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
08a9dd06 906
907 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
908 XEXP (x, 0)))
909 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
910 else if (CONSTANT_P (XEXP (x, 0))
37690010 911 || (GET_CODE (XEXP (x, 0)) == REG
912 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
08a9dd06 913 || (GET_CODE (XEXP (x, 0)) == PLUS
914 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
915 && (REGNO (XEXP (XEXP (x, 0), 0))
916 < FIRST_PSEUDO_REGISTER)
917 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
918 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
919 else
920 {
921 /* Make a new stack slot. Then indicate that something
c8ad158d 922 changed so we go back and recompute offsets for
08a9dd06 923 eliminable registers because the allocation of memory
924 below might change some offset. reg_equiv_{mem,address}
925 will be set up for this pseudo on the next pass around
926 the loop. */
927 reg_equiv_memory_loc[i] = 0;
928 reg_equiv_init[i] = 0;
929 alter_reg (i, -1);
08a9dd06 930 }
931 }
c8ad158d 932
e546118a 933 if (caller_save_needed)
934 setup_save_areas ();
935
dab171c5 936 /* If we allocated another stack slot, redo elimination bookkeeping. */
e546118a 937 if (starting_frame_size != get_frame_size ())
08a9dd06 938 continue;
939
e546118a 940 if (caller_save_needed)
ca9902fc 941 {
e546118a 942 save_call_clobbered_regs ();
943 /* That might have allocated new insn_chain structures. */
944 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
ca9902fc 945 }
946
dab171c5 947 calculate_needs_all_insns (global);
948
949 CLEAR_REG_SET (spilled_pseudos);
950 did_spill = 0;
951
952 something_changed = 0;
08a9dd06 953
c538053c 954 /* If we allocated any new memory locations, make another pass
955 since it might have changed elimination offsets. */
956 if (starting_frame_size != get_frame_size ())
957 something_changed = 1;
958
9d24e570 959 {
960 HARD_REG_SET to_spill;
961 CLEAR_HARD_REG_SET (to_spill);
962 update_eliminables (&to_spill);
963 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
964 if (TEST_HARD_REG_BIT (to_spill, i))
08a9dd06 965 {
dab171c5 966 spill_hard_reg (i, dumpfile, 1);
967 did_spill = 1;
60e28df3 968
969 /* Regardless of the state of spills, if we previously had
 970 a register that we thought we could eliminate, but now can
971 not eliminate, we must run another pass.
972
973 Consider pseudos which have an entry in reg_equiv_* which
974 reference an eliminable register. We must make another pass
975 to update reg_equiv_* so that we do not substitute in the
976 old value from when we thought the elimination could be
977 performed. */
978 something_changed = 1;
08a9dd06 979 }
9d24e570 980 }
0394c2e6 981
dab171c5 982 CLEAR_HARD_REG_SET (used_spill_regs);
983 /* Try to satisfy the needs for each insn. */
984 for (chain = insns_need_reload; chain != 0;
985 chain = chain->next_need_reload)
986 find_reload_regs (chain, dumpfile);
08a9dd06 987
1aacee06 988 if (failure)
989 goto failed;
e546118a 990
dab171c5 991 if (insns_need_reload != 0 || did_spill)
992 something_changed |= finish_spills (global, dumpfile);
590ec786 993
dab171c5 994 if (! something_changed)
995 break;
996
997 if (caller_save_needed)
590ec786 998 delete_caller_save_insns ();
08a9dd06 999 }
1000
1001 /* If global-alloc was run, notify it of any register eliminations we have
1002 done. */
1003 if (global)
1004 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1005 if (ep->can_eliminate)
1006 mark_elimination (ep->from, ep->to);
1007
08a9dd06 1008 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1009 If that insn didn't set the register (i.e., it copied the register to
1010 memory), just delete that insn instead of the equivalencing insn plus
1011 anything now dead. If we call delete_dead_insn on that insn, we may
537e8d9c 1012 delete the insn that actually sets the register if the register dies
08a9dd06 1013 there and that is incorrect. */
1014
1015 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
537e8d9c 1016 {
1017 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1018 {
1019 rtx list;
1020 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1021 {
1022 rtx equiv_insn = XEXP (list, 0);
1023 if (GET_CODE (equiv_insn) == NOTE)
1024 continue;
1025 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1026 delete_dead_insn (equiv_insn);
1027 else
1028 {
1029 PUT_CODE (equiv_insn, NOTE);
1030 NOTE_SOURCE_FILE (equiv_insn) = 0;
1031 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1032 }
1033 }
1034 }
1035 }
08a9dd06 1036
1037 /* Use the reload registers where necessary
1038 by generating move instructions to move the must-be-register
1039 values into or out of the reload registers. */
1040
dab171c5 1041 if (insns_need_reload != 0 || something_needs_elimination
1042 || something_needs_operands_changed)
8ad9ded8 1043 {
1044 int old_frame_size = get_frame_size ();
1045
1046 reload_as_needed (global);
1047
1048 if (old_frame_size != get_frame_size ())
1049 abort ();
1050
1051 if (num_eliminable)
1052 verify_initial_elim_offsets ();
1053 }
08a9dd06 1054
6631552e 1055 /* If we were able to eliminate the frame pointer, show that it is no
81d0fbb3 1056 longer live at the start of any basic block. If it is live by
6631552e 1057 virtue of being in a pseudo, that pseudo will be marked live
1058 and hence the frame pointer will be known to be live via that
1059 pseudo. */
1060
1061 if (! frame_pointer_needed)
1062 for (i = 0; i < n_basic_blocks; i++)
19997ce7 1063 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
1064 HARD_FRAME_POINTER_REGNUM);
6631552e 1065
2e756f7d 1066 /* Come here (with failure set nonzero) if we can't get enough spill regs
1067 and we decide not to abort about it. */
1068 failed:
1069
f905c47c 1070 reload_in_progress = 0;
1071
08a9dd06 1072 /* Now eliminate all pseudo regs by modifying them into
1073 their equivalent memory references.
1074 The REG-rtx's for the pseudos are modified in place,
1075 so all insns that used to refer to them now refer to memory.
1076
1077 For a reg that has a reg_equiv_address, all those insns
1078 were changed by reloading so that no insns refer to it any longer;
1079 but the DECL_RTL of a variable decl may refer to it,
1080 and if so this causes the debugging info to mention the variable. */
1081
1082 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1083 {
1084 rtx addr = 0;
03d29f65 1085 int in_struct = 0;
6a0934dd 1086 int is_scalar;
9e042f31 1087 int is_readonly = 0;
1088
1089 if (reg_equiv_memory_loc[i])
03d29f65 1090 {
9e042f31 1091 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
6a0934dd 1092 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
9e042f31 1093 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
03d29f65 1094 }
9e042f31 1095
1096 if (reg_equiv_mem[i])
1097 addr = XEXP (reg_equiv_mem[i], 0);
1098
08a9dd06 1099 if (reg_equiv_address[i])
1100 addr = reg_equiv_address[i];
9e042f31 1101
08a9dd06 1102 if (addr)
1103 {
1104 if (reg_renumber[i] < 0)
1105 {
1106 rtx reg = regno_reg_rtx[i];
1107 XEXP (reg, 0) = addr;
1108 REG_USERVAR_P (reg) = 0;
9e042f31 1109 RTX_UNCHANGING_P (reg) = is_readonly;
03d29f65 1110 MEM_IN_STRUCT_P (reg) = in_struct;
6a0934dd 1111 MEM_SCALAR_P (reg) = is_scalar;
b5ba9f3a 1112 /* We have no alias information about this newly created
1113 MEM. */
1114 MEM_ALIAS_SET (reg) = 0;
08a9dd06 1115 PUT_CODE (reg, MEM);
1116 }
1117 else if (reg_equiv_mem[i])
1118 XEXP (reg_equiv_mem[i], 0) = addr;
1119 }
1120 }
1121
8648fb5b 1122 /* We must set reload_completed now since the cleanup_subreg_operands call
1123 below will re-recognize each insn and reload may have generated insns
1124 which are only valid during and after reload. */
1125 reload_completed = 1;
1126
6aed5e78 1127 /* Make a pass over all the insns and delete all USEs which we inserted
d740a1dd 1128 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
881c3cf0 1129 notes. Delete all CLOBBER insns and simplify (subreg (reg)) operands.
1130 Also remove all REG_RETVAL and REG_LIBCALL notes since they are no longer
1131 useful or accurate. */
08a9dd06 1132
1133 for (insn = first; insn; insn = NEXT_INSN (insn))
1134 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1135 {
3f4d644c 1136 rtx *pnote;
08a9dd06 1137
d740a1dd 1138 if ((GET_CODE (PATTERN (insn)) == USE
1139 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1140 || GET_CODE (PATTERN (insn)) == CLOBBER)
6aed5e78 1141 {
1142 PUT_CODE (insn, NOTE);
1143 NOTE_SOURCE_FILE (insn) = 0;
1144 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1145 continue;
1146 }
3f4d644c 1147
1148 pnote = &REG_NOTES (insn);
1149 while (*pnote != 0)
08a9dd06 1150 {
3f4d644c 1151 if (REG_NOTE_KIND (*pnote) == REG_DEAD
881c3cf0 1152 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1153 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1154 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
3f4d644c 1155 *pnote = XEXP (*pnote, 1);
1156 else
1157 pnote = &XEXP (*pnote, 1);
08a9dd06 1158 }
d740a1dd 1159
1160 /* And simplify (subreg (reg)) if it appears as an operand. */
1161 cleanup_subreg_operands (insn);
6aed5e78 1162 }
08a9dd06 1163
be2828ce 1178 /* If we are doing stack checking, give a warning if this function's
1179 frame size is larger than we expect. */
1180 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1181 {
1182 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1183 static int verbose_warned = 0;
1184
1185 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1186 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1187 size += UNITS_PER_WORD;
1188
1189 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1190 {
1191 warning ("frame size too large for reliable stack checking");
1192 if (! verbose_warned)
1193 {
1194 warning ("try reducing the number of local variables");
1195 verbose_warned = 1;
1196 }
1197 }
1198 }
1199
08a9dd06 1200 /* Indicate that we no longer have known memory locations or constants. */
77210fa2 1201 if (reg_equiv_constant)
1202 free (reg_equiv_constant);
08a9dd06 1203 reg_equiv_constant = 0;
77210fa2 1204 if (reg_equiv_memory_loc)
1205 free (reg_equiv_memory_loc);
08a9dd06 1206 reg_equiv_memory_loc = 0;
2e756f7d 1207
6dd870f4 1208 if (real_known_ptr)
1209 free (real_known_ptr);
1210 if (real_at_ptr)
1211 free (real_at_ptr);
1212
829c0ce9 1213 free (reg_equiv_mem);
1214 free (reg_equiv_init);
1215 free (reg_equiv_address);
1216 free (reg_max_ref_width);
dab171c5 1217 free (reg_old_renumber);
1218 free (pseudo_previous_regs);
1219 free (pseudo_forbidden_regs);
829c0ce9 1220
590ec786 1221 FREE_REG_SET (spilled_pseudos);
1222
ed69f33f 1223 CLEAR_HARD_REG_SET (used_spill_regs);
1224 for (i = 0; i < n_spills; i++)
1225 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1226
590ec786 1227 /* Free all the insn_chain structures at once. */
1228 obstack_free (&reload_obstack, reload_startobj);
1229 unused_insn_chains = 0;
1230
2e756f7d 1231 return failure;
08a9dd06 1232}
1aacee06 1233
faf8dbee 1234/* Yet another special case. Unfortunately, reg-stack forces people to
1235 write incorrect clobbers in asm statements. These clobbers must not
1236 cause the register to appear in bad_spill_regs, otherwise we'll call
1237 fatal_insn later. We clear the corresponding regnos in the live
1238 register sets to avoid this.
1239 The whole thing is rather sick, I'm afraid. */
1240static void
1241maybe_fix_stack_asms ()
1242{
1243#ifdef STACK_REGS
1244 char *constraints[MAX_RECOG_OPERANDS];
1245 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1246 struct insn_chain *chain;
1247
1248 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1249 {
1250 int i, noperands;
1251 HARD_REG_SET clobbered, allowed;
1252 rtx pat;
1253
1254 if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
1255 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1256 continue;
1257 pat = PATTERN (chain->insn);
1258 if (GET_CODE (pat) != PARALLEL)
1259 continue;
1260
1261 CLEAR_HARD_REG_SET (clobbered);
1262 CLEAR_HARD_REG_SET (allowed);
1263
1264 /* First, make a mask of all stack regs that are clobbered. */
1265 for (i = 0; i < XVECLEN (pat, 0); i++)
1266 {
1267 rtx t = XVECEXP (pat, 0, i);
1268 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1269 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1270 }
1271
1272 /* Get the operand values and constraints out of the insn. */
1273 decode_asm_operands (pat, recog_operand, recog_operand_loc,
1274 constraints, operand_mode);
1275
1276 /* For every operand, see what registers are allowed. */
1277 for (i = 0; i < noperands; i++)
1278 {
1279 char *p = constraints[i];
1280 /* For every alternative, we compute the class of registers allowed
1281 for reloading in CLS, and merge its contents into the reg set
1282 ALLOWED. */
1283 int cls = (int) NO_REGS;
1284
1285 for (;;)
1286 {
1287 char c = *p++;
1288
1289 if (c == '\0' || c == ',' || c == '#')
1290 {
1291 /* End of one alternative - mark the regs in the current
1292 class, and reset the class. */
1293 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1294 cls = NO_REGS;
1295 if (c == '#')
1296 do {
1297 c = *p++;
1298 } while (c != '\0' && c != ',');
1299 if (c == '\0')
1300 break;
1301 continue;
1302 }
1303
1304 switch (c)
1305 {
1306 case '=': case '+': case '*': case '%': case '?': case '!':
1307 case '0': case '1': case '2': case '3': case '4': case 'm':
1308 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1309 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1310 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1311 case 'P':
1312#ifdef EXTRA_CONSTRAINT
1313 case 'Q': case 'R': case 'S': case 'T': case 'U':
1314#endif
1315 break;
1316
1317 case 'p':
1318 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1319 break;
1320
1321 case 'g':
1322 case 'r':
1323 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1324 break;
1325
1326 default:
1327 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1328
1329 }
1330 }
1331 }
1332 /* Those of the registers which are clobbered, but allowed by the
1333 constraints, must be usable as reload registers. So clear them
1334 out of the life information. */
1335 AND_HARD_REG_SET (allowed, clobbered);
1336 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1337 if (TEST_HARD_REG_BIT (allowed, i))
1338 {
1339 CLEAR_REGNO_REG_SET (chain->live_before, i);
1340 CLEAR_REGNO_REG_SET (chain->live_after, i);
1341 }
1342 }
1343
1344#endif
1345}
1346
dab171c5 1347\f
1348/* Walk the chain of insns, and determine for each whether it needs reloads
1349 and/or eliminations. Build the corresponding insns_need_reload list, and
1350 set something_needs_elimination as appropriate. */
1351static void
590ec786 1352calculate_needs_all_insns (global)
1aacee06 1353 int global;
1354{
590ec786 1355 struct insn_chain **pprev_reload = &insns_need_reload;
dab171c5 1356 struct insn_chain **pchain;
1aacee06 1357
dab171c5 1358 something_needs_elimination = 0;
1359
1360 for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
1aacee06 1361 {
dab171c5 1362 rtx insn;
1363 struct insn_chain *chain;
1364
1365 chain = *pchain;
1366 insn = chain->insn;
1aacee06 1367
dab171c5 1368 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1369 include REG_LABEL), we need to see what effects this has on the
1370 known offsets at labels. */
1aacee06 1371
1372 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1373 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1374 && REG_NOTES (insn) != 0))
1375 set_label_offsets (insn, insn, 0);
1376
1377 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1378 {
1379 rtx old_body = PATTERN (insn);
1380 int old_code = INSN_CODE (insn);
1381 rtx old_notes = REG_NOTES (insn);
1382 int did_elimination = 0;
93c7b06e 1383 int operands_changed = 0;
aa8d28af 1384 rtx set = single_set (insn);
1385
1386 /* Skip insns that only set an equivalence. */
1387 if (set && GET_CODE (SET_DEST (set)) == REG
1388 && reg_renumber[REGNO (SET_DEST (set))] < 0
1389 && reg_equiv_constant[REGNO (SET_DEST (set))])
1390 continue;
1aacee06 1391
1aacee06 1392 /* If needed, eliminate any eliminable registers. */
aa8d28af 1393 if (num_eliminable || num_eliminable_invariants)
1aacee06 1394 did_elimination = eliminate_regs_in_insn (insn, 0);
1395
1396 /* Analyze the instruction. */
93c7b06e 1397 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1398 global, spill_reg_order);
1399
1400 /* If a no-op set needs more than one reload, this is likely
1401 to be something that needs input address reloads. We
1402 can't get rid of this cleanly later, and it is of no use
1403 anyway, so discard it now.
1404 We only do this when expensive_optimizations is enabled,
1405 since this complements reload inheritance / output
1406 reload deletion, and it can make debugging harder. */
1407 if (flag_expensive_optimizations && n_reloads > 1)
1408 {
1409 rtx set = single_set (insn);
1410 if (set
1411 && SET_SRC (set) == SET_DEST (set)
1412 && GET_CODE (SET_SRC (set)) == REG
1413 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1414 {
1415 PUT_CODE (insn, NOTE);
1416 NOTE_SOURCE_FILE (insn) = 0;
1417 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1418 continue;
1419 }
1420 }
1421 if (num_eliminable)
1422 update_eliminable_offsets ();
1aacee06 1423
1424 /* Remember for later shortcuts which insns had any reloads or
590ec786 1425 register eliminations. */
1426 chain->need_elim = did_elimination;
dab171c5 1427 chain->need_reload = n_reloads > 0;
1428 chain->need_operand_change = operands_changed;
1aacee06 1429
1430 /* Discard any register replacements done. */
1431 if (did_elimination)
1432 {
1433 obstack_free (&reload_obstack, reload_firstobj);
1434 PATTERN (insn) = old_body;
1435 INSN_CODE (insn) = old_code;
1436 REG_NOTES (insn) = old_notes;
1437 something_needs_elimination = 1;
1438 }
1439
93c7b06e 1440 something_needs_operands_changed |= operands_changed;
1441
e546118a 1442 if (n_reloads != 0)
590ec786 1443 {
1444 *pprev_reload = chain;
1445 pprev_reload = &chain->next_need_reload;
dab171c5 1446
1447 calculate_needs (chain);
590ec786 1448 }
1aacee06 1449 }
1aacee06 1450 }
590ec786 1451 *pprev_reload = 0;
1aacee06 1452}
1453
dab171c5 1454/* Compute the maximum number of additional registers needed by one insn,
1455 given by CHAIN. Collect information separately for each class of regs.
1456
1457 To compute the number of reload registers of each class needed for an
1458 insn, we must simulate what choose_reload_regs can do. We do this by
1459 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1460 reloads are used in both. The input part uses those reloads,
1461 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1462 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1463 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1464
1465 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1466 which are live for the entire output portion, and the maximum of all the
1467 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1aacee06 1468
1469 The total number of registers needed is the maximum of the
1470 inputs and outputs. */
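/* For illustration (numbers hypothetical): if an insn needs two
   RELOAD_FOR_INPUT regs, one RELOAD_FOR_OPERAND_ADDRESS reg and one
   RELOAD_FOR_OUTPUT reg, all of a single class, then the input part
   needs 2 + 1 = 3 regs of that class, the output part needs 1, and
   the insn as a whole needs MAX (3, 1) = 3.  */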
1471
dab171c5 1472static void
1473calculate_needs (chain)
590ec786 1474 struct insn_chain *chain;
1aacee06 1475{
1aacee06 1476 int i;
1477
1aacee06 1478 /* Each `struct needs' corresponds to one RELOAD_... type. */
1479 struct {
1480 struct needs other;
1481 struct needs input;
1482 struct needs output;
1483 struct needs insn;
1484 struct needs other_addr;
1485 struct needs op_addr;
1486 struct needs op_addr_reload;
1487 struct needs in_addr[MAX_RECOG_OPERANDS];
1488 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1489 struct needs out_addr[MAX_RECOG_OPERANDS];
1490 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1491 } insn_needs;
1492
dab171c5 1493 bzero ((char *) chain->group_size, sizeof chain->group_size);
1494 for (i = 0; i < N_REG_CLASSES; i++)
1495 chain->group_mode[i] = VOIDmode;
1aacee06 1496 bzero ((char *) &insn_needs, sizeof insn_needs);
1497
1498 /* Count each reload once in every class
1499 containing the reload's own class. */
1500
1501 for (i = 0; i < n_reloads; i++)
1502 {
1503 register enum reg_class *p;
1504 enum reg_class class = reload_reg_class[i];
1505 int size;
1506 enum machine_mode mode;
1507 struct needs *this_needs;
1508
1509 /* Don't count the dummy reloads, for which one of the
1510 regs mentioned in the insn can be used for reloading.
1511 Don't count optional reloads.
1512 Don't count reloads that got combined with others. */
1513 if (reload_reg_rtx[i] != 0
1514 || reload_optional[i] != 0
1515 || (reload_out[i] == 0 && reload_in[i] == 0
1516 && ! reload_secondary_p[i]))
1517 continue;
1518
1aacee06 1519 mode = reload_inmode[i];
1520 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1521 mode = reload_outmode[i];
1522 size = CLASS_MAX_NREGS (class, mode);
1523
1524 /* Decide which time-of-use to count this reload for. */
1525 switch (reload_when_needed[i])
1526 {
1527 case RELOAD_OTHER:
1528 this_needs = &insn_needs.other;
1529 break;
1530 case RELOAD_FOR_INPUT:
1531 this_needs = &insn_needs.input;
1532 break;
1533 case RELOAD_FOR_OUTPUT:
1534 this_needs = &insn_needs.output;
1535 break;
1536 case RELOAD_FOR_INSN:
1537 this_needs = &insn_needs.insn;
1538 break;
1539 case RELOAD_FOR_OTHER_ADDRESS:
1540 this_needs = &insn_needs.other_addr;
1541 break;
1542 case RELOAD_FOR_INPUT_ADDRESS:
1543 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1544 break;
1545 case RELOAD_FOR_INPADDR_ADDRESS:
1546 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1547 break;
1548 case RELOAD_FOR_OUTPUT_ADDRESS:
1549 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1550 break;
1551 case RELOAD_FOR_OUTADDR_ADDRESS:
1552 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1553 break;
1554 case RELOAD_FOR_OPERAND_ADDRESS:
1555 this_needs = &insn_needs.op_addr;
1556 break;
1557 case RELOAD_FOR_OPADDR_ADDR:
1558 this_needs = &insn_needs.op_addr_reload;
1559 break;
3c1d7436 1560 default:
 1561	  abort ();
1aacee06 1562 }
1563
1564 if (size > 1)
1565 {
1566 enum machine_mode other_mode, allocate_mode;
1567
1568 /* Count number of groups needed separately from
1569 number of individual regs needed. */
1570 this_needs->groups[(int) class]++;
1571 p = reg_class_superclasses[(int) class];
1572 while (*p != LIM_REG_CLASSES)
1573 this_needs->groups[(int) *p++]++;
1574
1575 /* Record size and mode of a group of this class. */
1576 /* If more than one size group is needed,
1577 make all groups the largest needed size. */
dab171c5 1578 if (chain->group_size[(int) class] < size)
1aacee06 1579 {
dab171c5 1580 other_mode = chain->group_mode[(int) class];
1aacee06 1581 allocate_mode = mode;
1582
dab171c5 1583 chain->group_size[(int) class] = size;
1584 chain->group_mode[(int) class] = mode;
1aacee06 1585 }
1586 else
1587 {
1588 other_mode = mode;
dab171c5 1589 allocate_mode = chain->group_mode[(int) class];
1aacee06 1590 }
1591
1592 /* Crash if two dissimilar machine modes both need
1593 groups of consecutive regs of the same class. */
1594
1595 if (other_mode != VOIDmode && other_mode != allocate_mode
1596 && ! modes_equiv_for_class_p (allocate_mode,
1597 other_mode, class))
1598 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
dab171c5 1599 chain->insn);
1aacee06 1600 }
1601 else if (size == 1)
1602 {
274c11d8 1603 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
1aacee06 1604 p = reg_class_superclasses[(int) class];
1605 while (*p != LIM_REG_CLASSES)
274c11d8 1606 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
1aacee06 1607 }
1608 else
1609 abort ();
1610 }
1611
1612 /* All reloads have been counted for this insn;
1613 now merge the various times of use.
1614 This sets insn_needs, etc., to the maximum total number
1615 of registers needed at any point in this insn. */
1616
1617 for (i = 0; i < N_REG_CLASSES; i++)
1618 {
1619 int j, in_max, out_max;
1620
1621 /* Compute normal and nongroup needs. */
1622 for (j = 0; j <= 1; j++)
1623 {
1624 int k;
1625 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1626 {
1627 in_max = MAX (in_max,
1628 (insn_needs.in_addr[k].regs[j][i]
1629 + insn_needs.in_addr_addr[k].regs[j][i]));
1630 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1631 out_max = MAX (out_max,
1632 insn_needs.out_addr_addr[k].regs[j][i]);
1633 }
1634
1635 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1636 and operand addresses but not things used to reload
1637 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1638 don't conflict with things needed to reload inputs or
1639 outputs. */
1640
1641 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1642 insn_needs.op_addr_reload.regs[j][i]),
1643 in_max);
1644
1645 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1646
1647 insn_needs.input.regs[j][i]
1648 = MAX (insn_needs.input.regs[j][i]
1649 + insn_needs.op_addr.regs[j][i]
1650 + insn_needs.insn.regs[j][i],
1651 in_max + insn_needs.input.regs[j][i]);
1652
1653 insn_needs.output.regs[j][i] += out_max;
1654 insn_needs.other.regs[j][i]
1655 += MAX (MAX (insn_needs.input.regs[j][i],
1656 insn_needs.output.regs[j][i]),
1657 insn_needs.other_addr.regs[j][i]);
1658
1659 }
1660
1661 /* Now compute group needs. */
1662 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1663 {
1664 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1665 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1666 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1667 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1668 }
1669
1670 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1671 insn_needs.op_addr_reload.groups[i]),
1672 in_max);
1673 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1674
1675 insn_needs.input.groups[i]
1676 = MAX (insn_needs.input.groups[i]
1677 + insn_needs.op_addr.groups[i]
1678 + insn_needs.insn.groups[i],
1679 in_max + insn_needs.input.groups[i]);
1680
1681 insn_needs.output.groups[i] += out_max;
1682 insn_needs.other.groups[i]
1683 += MAX (MAX (insn_needs.input.groups[i],
1684 insn_needs.output.groups[i]),
1685 insn_needs.other_addr.groups[i]);
1686 }
1687
590ec786 1688 /* Record the needs for later. */
1689 chain->need = insn_needs.other;
1aacee06 1690}
dab171c5 1691\f
1aacee06 1692/* Find a group of exactly 2 registers.
1693
1694 First try to fill out the group by spilling a single register which
1695 would allow completion of the group.
1696
1697 Then try to create a new group from a pair of registers, neither of
1698 which are explicitly used.
1699
1700 Then try to create a group from any pair of registers. */
dab171c5 1701
1702static void
1703find_tworeg_group (chain, class, dumpfile)
1704 struct insn_chain *chain;
1aacee06 1705 int class;
1706 FILE *dumpfile;
1707{
1708 int i;
1709 /* First, look for a register that will complete a group. */
1710 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1711 {
1712 int j, other;
1713
1714 j = potential_reload_regs[i];
1715 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1716 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1717 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
dab171c5 1719 && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
1720 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1aacee06 1721 /* We don't want one part of another group.
1722 We could get "two groups" that overlap! */
dab171c5 1723 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
1aacee06 1724 || (j < FIRST_PSEUDO_REGISTER - 1
1725 && (other = j + 1, spill_reg_order[other] >= 0)
1726 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1727 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
dab171c5 1728 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1729 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1730 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
1aacee06 1731 {
1732 register enum reg_class *p;
1733
1734 /* We have found one that will complete a group,
1735 so count off one group as provided. */
dab171c5 1736 chain->need.groups[class]--;
1aacee06 1737 p = reg_class_superclasses[class];
1738 while (*p != LIM_REG_CLASSES)
1739 {
dab171c5 1740 if (chain->group_size [(int) *p] <= chain->group_size [class])
1741 chain->need.groups[(int) *p]--;
1aacee06 1742 p++;
1743 }
1744
1745 /* Indicate both these regs are part of a group. */
dab171c5 1746 SET_HARD_REG_BIT (chain->counted_for_groups, j);
1747 SET_HARD_REG_BIT (chain->counted_for_groups, other);
1aacee06 1748 break;
1749 }
1750 }
1751 /* We can't complete a group, so start one. */
1aacee06 1752 if (i == FIRST_PSEUDO_REGISTER)
1753 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1754 {
1755 int j, k;
1756 j = potential_reload_regs[i];
1757 /* Verify that J+1 is a potential reload reg. */
1758 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1759 if (potential_reload_regs[k] == j + 1)
1760 break;
1761 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1762 && k < FIRST_PSEUDO_REGISTER
1763 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1764 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1765 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
dab171c5 1766 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1767 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
1aacee06 1768 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1769 break;
1770 }
1771
1772 /* I should be the index in potential_reload_regs
1773 of the new reload reg we have found. */
1774
dab171c5 1775 new_spill_reg (chain, i, class, 0, dumpfile);
1aacee06 1776}
1777
1778/* Find a group of more than 2 registers.
1779 Look for a sufficient sequence of unspilled registers, and spill them all
1780 at once. */
dab171c5 1781
1782static void
1783find_group (chain, class, dumpfile)
1784 struct insn_chain *chain;
1aacee06 1785 int class;
1786 FILE *dumpfile;
1787{
1aacee06 1788 int i;
1789
1790 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1791 {
dab171c5 1792 int j = potential_reload_regs[i];
1aacee06 1793
1aacee06 1794 if (j >= 0
dab171c5 1795 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
1796 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
1aacee06 1797 {
dab171c5 1798 int k;
1aacee06 1799 /* Check each reg in the sequence. */
dab171c5 1800 for (k = 0; k < chain->group_size[class]; k++)
1aacee06 1801 if (! (spill_reg_order[j + k] < 0
1802 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1803 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1804 break;
1805 /* We got a full sequence, so spill them all. */
dab171c5 1806 if (k == chain->group_size[class])
1aacee06 1807 {
1808 register enum reg_class *p;
dab171c5 1809 for (k = 0; k < chain->group_size[class]; k++)
1aacee06 1810 {
1811 int idx;
dab171c5 1812 SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
1aacee06 1813 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1814 if (potential_reload_regs[idx] == j + k)
1815 break;
dab171c5 1816 new_spill_reg (chain, idx, class, 0, dumpfile);
1aacee06 1817 }
1818
1819 /* We have found one that will complete a group,
1820 so count off one group as provided. */
dab171c5 1821 chain->need.groups[class]--;
1aacee06 1822 p = reg_class_superclasses[class];
1823 while (*p != LIM_REG_CLASSES)
1824 {
dab171c5 1825 if (chain->group_size [(int) *p]
1826 <= chain->group_size [class])
1827 chain->need.groups[(int) *p]--;
1aacee06 1828 p++;
1829 }
dab171c5 1830 return;
1aacee06 1831 }
1832 }
1833 }
1834 /* There are no groups left. */
dab171c5 1835 spill_failure (chain->insn);
1aacee06 1836 failure = 1;
1aacee06 1837}
1838
dab171c5 1839/* If pseudo REG conflicts with one of our reload registers, mark it as
1840 spilled. */
1841static void
1842maybe_mark_pseudo_spilled (reg)
1843 int reg;
1844{
1845 int i;
1846 int r = reg_renumber[reg];
1847 int nregs;
1848
1849 if (r < 0)
1850 abort ();
1851 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
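  /* The pseudo occupies hard regs R through R + NREGS - 1; check whether
     any of the chosen spill regs falls inside that range.  */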
1852 for (i = 0; i < n_spills; i++)
1853 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1854 {
1855 SET_REGNO_REG_SET (spilled_pseudos, reg);
1856 return;
1857 }
1858}
1859
1860/* Find more reload regs to satisfy the remaining need of an insn, which
1861 is given by CHAIN.
1aacee06 1862 Do it by ascending class number, since otherwise a reg
1863 might be spilled for a big class and might fail to count
1864 for a smaller class even though it belongs to that class.
1865
1866 Count spilled regs in `spills', and add entries to
1867 `spill_regs' and `spill_reg_order'.
1868
1869 ??? Note there is a problem here.
1870 When there is a need for a group in a high-numbered class,
1871 and also need for non-group regs that come from a lower class,
1872 the non-group regs are chosen first. If there aren't many regs,
1873 they might leave no room for a group.
1874
1875 This was happening on the 386. To fix it, we added the code
1876 that calls possible_group_p, so that the lower class won't
1877 break up the last possible group.
1878
1879 Really fixing the problem would require changes above
1880 in counting the regs already spilled, and in choose_reload_regs.
1881 It might be hard to avoid introducing bugs there. */
1882
dab171c5 1883static void
1884find_reload_regs (chain, dumpfile)
1885 struct insn_chain *chain;
1aacee06 1886 FILE *dumpfile;
1887{
dab171c5 1888 int i, class;
1889 short *group_needs = chain->need.groups;
1890 short *simple_needs = chain->need.regs[0];
1891 short *nongroup_needs = chain->need.regs[1];
1892
1893 if (dumpfile)
1894 fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1895
1896 /* Compute the order of preference for hard registers to spill.
1897 Store them by decreasing preference in potential_reload_regs. */
1898
1899 order_regs_for_reload (chain);
1900
1901 /* So far, no hard regs have been spilled. */
1902 n_spills = 0;
1903 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1904 spill_reg_order[i] = -1;
1aacee06 1905
dab171c5 1906 CLEAR_HARD_REG_SET (chain->used_spill_regs);
1907 CLEAR_HARD_REG_SET (chain->counted_for_groups);
1908 CLEAR_HARD_REG_SET (chain->counted_for_nongroups);
1aacee06 1909
1910 for (class = 0; class < N_REG_CLASSES; class++)
1911 {
1912 /* First get the groups of registers.
1913 If we got single registers first, we might fragment
1914 possible groups. */
dab171c5 1915 while (group_needs[class] > 0)
1aacee06 1916 {
1917 /* If any single spilled regs happen to form groups,
1918 count them now. Maybe we don't really need
1919 to spill another group. */
dab171c5 1920 count_possible_groups (chain, class);
1aacee06 1921
dab171c5 1922 if (group_needs[class] <= 0)
1aacee06 1923 break;
1924
dab171c5 1925 /* Groups of size 2, the only groups used on most machines,
1aacee06 1926 are treated specially. */
dab171c5 1927 if (chain->group_size[class] == 2)
1928 find_tworeg_group (chain, class, dumpfile);
1aacee06 1929 else
dab171c5 1930 find_group (chain, class, dumpfile);
1aacee06 1931 if (failure)
dab171c5 1932 return;
1aacee06 1933 }
1934
1935 /* Now similarly satisfy all need for single registers. */
1936
dab171c5 1937 while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
1aacee06 1938 {
1aacee06 1939 /* If we spilled enough regs, but they weren't counted
1940 against the non-group need, see if we can count them now.
1941 If so, we can avoid some actual spilling. */
dab171c5 1942 if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
1aacee06 1943 for (i = 0; i < n_spills; i++)
1944 {
1945 int regno = spill_regs[i];
1946 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
dab171c5 1947 && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
1948 && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
1949 && nongroup_needs[class] > 0)
1950 {
1951 register enum reg_class *p;
1aacee06 1952
dab171c5 1953 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
1954 nongroup_needs[class]--;
1955 p = reg_class_superclasses[class];
1956 while (*p != LIM_REG_CLASSES)
1957 nongroup_needs[(int) *p++]--;
1958 }
1aacee06 1959 }
dab171c5 1960
1961 if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
1aacee06 1962 break;
1963
1964 /* Consider the potential reload regs that aren't
1965 yet in use as reload regs, in order of preference.
1966 Find the most preferred one that's in this class. */
1967
1968 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1969 {
1970 int regno = potential_reload_regs[i];
1971 if (regno >= 0
1972 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1973 /* If this reg will not be available for groups,
1974 pick one that does not foreclose possible groups.
1975 This is a kludge, and not very general,
1976 but it should be sufficient to make the 386 work,
1977 and the problem should not occur on machines with
1978 more registers. */
dab171c5 1979 && (nongroup_needs[class] == 0
1980 || possible_group_p (chain, regno)))
1aacee06 1981 break;
1982 }
1983
1984 /* If we couldn't get a register, try to get one even if we
1985 might foreclose possible groups. This may cause problems
1986 later, but that's better than aborting now, since it is
1987 possible that we will, in fact, be able to form the needed
1988 group even with this allocation. */
1989
1990 if (i >= FIRST_PSEUDO_REGISTER
dab171c5 1991 && asm_noperands (chain->insn) < 0)
1aacee06 1992 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1993 if (potential_reload_regs[i] >= 0
1994 && TEST_HARD_REG_BIT (reg_class_contents[class],
1995 potential_reload_regs[i]))
1996 break;
1997
1998 /* I should be the index in potential_reload_regs
1999 of the new reload reg we have found. */
2000
dab171c5 2001 new_spill_reg (chain, i, class, 1, dumpfile);
2002 if (failure)
2003 return;
1aacee06 2004 }
2005 }
dab171c5 2006
2007 /* We know which hard regs to use, now mark the pseudos that live in them
2008 as needing to be kicked out. */
2009 EXECUTE_IF_SET_IN_REG_SET
2010 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
2011 {
2012 maybe_mark_pseudo_spilled (i);
2013 });
2014 EXECUTE_IF_SET_IN_REG_SET
2015 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
2016 {
2017 maybe_mark_pseudo_spilled (i);
2018 });
2019
2020 IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
1aacee06 2021}
2022
dab171c5 2023void
2024dump_needs (chain, dumpfile)
2025 struct insn_chain *chain;
9d24e570 2026 FILE *dumpfile;
2027{
2028 static char *reg_class_names[] = REG_CLASS_NAMES;
2029 int i;
dab171c5 2030 struct needs *n = &chain->need;
9d24e570 2031
2032 for (i = 0; i < N_REG_CLASSES; i++)
2033 {
dab171c5 2034 if (n->regs[i][0] > 0)
9d24e570 2035 fprintf (dumpfile,
dab171c5 2036 ";; Need %d reg%s of class %s.\n",
2037 n->regs[i][0], n->regs[i][0] == 1 ? "" : "s",
2038 reg_class_names[i]);
2039 if (n->regs[i][1] > 0)
9d24e570 2040 fprintf (dumpfile,
dab171c5 2041 ";; Need %d nongroup reg%s of class %s.\n",
2042 n->regs[i][1], n->regs[i][1] == 1 ? "" : "s",
2043 reg_class_names[i]);
2044 if (n->groups[i] > 0)
9d24e570 2045 fprintf (dumpfile,
dab171c5 2046 ";; Need %d group%s (%smode) of class %s.\n",
2047 n->groups[i], n->groups[i] == 1 ? "" : "s",
2048 mode_name[(int) chain->group_mode[i]],
2049 reg_class_names[i]);
9d24e570 2050 }
2051}
08a9dd06 2052\f
e546118a 2053/* Delete all insns that were inserted by emit_caller_save_insns during
2054 this iteration. */
2055static void
590ec786 2056delete_caller_save_insns ()
e546118a 2057{
590ec786 2058 struct insn_chain *c = reload_insn_chain;
e546118a 2059
590ec786 2060 while (c != 0)
e546118a 2061 {
590ec786 2062 while (c != 0 && c->is_caller_save_insn)
e546118a 2063 {
590ec786 2064 struct insn_chain *next = c->next;
2065 rtx insn = c->insn;
2066
68676d00 2067 if (insn == BLOCK_HEAD (c->block))
2068 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
2069 if (insn == BLOCK_END (c->block))
2070 BLOCK_END (c->block) = PREV_INSN (insn);
590ec786 2071 if (c == reload_insn_chain)
2072 reload_insn_chain = next;
2073
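	  /* Splice the insn out of the doubly-linked list of insns.  */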
2074 if (NEXT_INSN (insn) != 0)
2075 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2076 if (PREV_INSN (insn) != 0)
2077 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2078
2079 if (next)
2080 next->prev = c->prev;
2081 if (c->prev)
2082 c->prev->next = next;
2083 c->next = unused_insn_chains;
2084 unused_insn_chains = c;
2085 c = next;
e546118a 2086 }
590ec786 2087 if (c != 0)
2088 c = c->next;
e546118a 2089 }
2090}
2091\f
08a9dd06 2092/* Nonzero if, after spilling reg REGNO for non-groups,
2093 it will still be possible to find a group if we still need one. */
2094
2095static int
dab171c5 2096possible_group_p (chain, regno)
2097 struct insn_chain *chain;
08a9dd06 2098 int regno;
08a9dd06 2099{
2100 int i;
2101 int class = (int) NO_REGS;
2102
2103 for (i = 0; i < (int) N_REG_CLASSES; i++)
dab171c5 2104 if (chain->need.groups[i] > 0)
08a9dd06 2105 {
2106 class = i;
2107 break;
2108 }
2109
2110 if (class == (int) NO_REGS)
2111 return 1;
2112
2113 /* Consider each pair of consecutive registers. */
2114 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2115 {
2116 /* Ignore pairs that include reg REGNO. */
2117 if (i == regno || i + 1 == regno)
2118 continue;
2119
2120 /* Ignore pairs that are outside the class that needs the group.
2121 ??? Here we fail to handle the case where two different classes
2122 independently need groups. But this never happens with our
2123 current machine descriptions. */
2124 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2125 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2126 continue;
2127
2128 /* A pair of consecutive regs we can still spill does the trick. */
2129 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2130 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2131 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2132 return 1;
2133
2134 /* A pair of one already spilled and one we can spill does it
2135 provided the one already spilled is not otherwise reserved. */
2136 if (spill_reg_order[i] < 0
2137 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2138 && spill_reg_order[i + 1] >= 0
dab171c5 2139 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2140 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
08a9dd06 2141 return 1;
2142 if (spill_reg_order[i + 1] < 0
2143 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2144 && spill_reg_order[i] >= 0
dab171c5 2145 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2146 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
08a9dd06 2147 return 1;
2148 }
2149
2150 return 0;
2151}
dab171c5 2152
86d5e51f 2153/* Count any groups of CLASS that can be formed from the registers recently
2154 spilled. */
08a9dd06 2155
2156static void
dab171c5 2157count_possible_groups (chain, class)
2158 struct insn_chain *chain;
86d5e51f 2159 int class;
08a9dd06 2160{
86d5e51f 2161 HARD_REG_SET new;
2162 int i, j;
2163
08a9dd06 2164 /* Now find all consecutive groups of spilled registers
2165 and mark each group off against the need for such groups.
2166 But don't count them against ordinary need, yet. */
2167
dab171c5 2168 if (chain->group_size[class] == 0)
86d5e51f 2169 return;
2170
2171 CLEAR_HARD_REG_SET (new);
2172
 2173	  /* Make a mask of all the regs that are spill regs in CLASS.  */
2174 for (i = 0; i < n_spills; i++)
dab171c5 2175 {
2176 int regno = spill_regs[i];
2177
2178 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2179 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
2180 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
2181 SET_HARD_REG_BIT (new, regno);
2182 }
86d5e51f 2183
2184 /* Find each consecutive group of them. */
dab171c5 2185 for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
86d5e51f 2186 if (TEST_HARD_REG_BIT (new, i)
dab171c5 2187 && i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
2188 && HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
08a9dd06 2189 {
dab171c5 2190 for (j = 1; j < chain->group_size[class]; j++)
86d5e51f 2191 if (! TEST_HARD_REG_BIT (new, i + j))
2192 break;
08a9dd06 2193
dab171c5 2194 if (j == chain->group_size[class])
86d5e51f 2195 {
2196 /* We found a group. Mark it off against this class's need for
2197 groups, and against each superclass too. */
2198 register enum reg_class *p;
2199
dab171c5 2200 chain->need.groups[class]--;
86d5e51f 2201 p = reg_class_superclasses[class];
2202 while (*p != LIM_REG_CLASSES)
7dbb7fe8 2203 {
dab171c5 2204 if (chain->group_size [(int) *p] <= chain->group_size [class])
2205 chain->need.groups[(int) *p]--;
7dbb7fe8 2206 p++;
2207 }
86d5e51f 2208
2209 /* Don't count these registers again. */
dab171c5 2210 for (j = 0; j < chain->group_size[class]; j++)
2211 SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
86d5e51f 2212 }
2213
2214 /* Skip to the last reg in this group. When i is incremented above,
2215 it will then point to the first reg of the next possible group. */
2216 i += j - 1;
2217 }
08a9dd06 2218}
2219\f
2220/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2221 another mode that needs to be reloaded for the same register class CLASS.
2222 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2223 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2224
2225 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2226 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2227 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2228 causes unnecessary failures on machines requiring alignment of register
2229 groups when the two modes are different sizes, because the larger mode has
2230 more strict alignment rules than the smaller mode. */
2231
2232static int
2233modes_equiv_for_class_p (allocate_mode, other_mode, class)
2234 enum machine_mode allocate_mode, other_mode;
2235 enum reg_class class;
2236{
2237 register int regno;
2238 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2239 {
2240 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2241 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2242 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2243 return 0;
2244 }
2245 return 1;
2246}
dab171c5 2247\f
2e756f7d 2248/* Handle the failure to find a register to spill.
2249 INSN should be one of the insns which needed this particular spill reg. */
2250
2251static void
2252spill_failure (insn)
2253 rtx insn;
2254{
2255 if (asm_noperands (PATTERN (insn)) >= 0)
2256 error_for_asm (insn, "`asm' needs too many reloads");
2257 else
ed6a2d98 2258 fatal_insn ("Unable to find a register to spill.", insn);
2e756f7d 2259}
2260
dab171c5 2261/* Add a new register to the tables of available spill-registers.
2262 CHAIN is the insn for which the register will be used; we decrease the
2263 needs of that insn.
08a9dd06 2264 I is the index of this register in potential_reload_regs.
2265 CLASS is the regclass whose need is being satisfied.
dab171c5 2266 NONGROUP is 0 if this register is part of a group.
2267 DUMPFILE is the same as the one that `reload' got. */
08a9dd06 2268
dab171c5 2269static void
2270new_spill_reg (chain, i, class, nongroup, dumpfile)
2271 struct insn_chain *chain;
08a9dd06 2272 int i;
2273 int class;
dab171c5 2274 int nongroup;
08a9dd06 2275 FILE *dumpfile;
2276{
2277 register enum reg_class *p;
08a9dd06 2278 int regno = potential_reload_regs[i];
2279
2280 if (i >= FIRST_PSEUDO_REGISTER)
dab171c5 2281 {
2282 spill_failure (chain->insn);
2283 failure = 1;
2284 return;
2285 }
08a9dd06 2286
dab171c5 2287 if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
fca03663 2288 {
2289 static char *reg_class_names[] = REG_CLASS_NAMES;
dab171c5 2290
2291 if (asm_noperands (PATTERN (chain->insn)) < 0)
2292 {
2293 /* The error message is still correct - we know only that it wasn't
2294 an asm statement that caused the problem, but one of the global
2295 registers declared by the users might have screwed us. */
2296 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2297 regno, reg_names[regno], reg_class_names[class]);
2298 error ("This may be due to a compiler bug or to impossible asm");
2299 error ("statements or clauses.");
2300 fatal_insn ("This is the instruction:", chain->insn);
2301 }
2302 error_for_asm (chain->insn, "Invalid `asm' statement:");
2303 error_for_asm (chain->insn,
2304 "fixed or forbidden register %d (%s) was spilled for class %s.",
2305 regno, reg_names[regno], reg_class_names[class]);
2306 failure = 1;
2307 return;
fca03663 2308 }
08a9dd06 2309
2310 /* Make reg REGNO an additional reload reg. */
2311
2312 potential_reload_regs[i] = -1;
2313 spill_regs[n_spills] = regno;
2314 spill_reg_order[regno] = n_spills;
2315 if (dumpfile)
dab171c5 2316 fprintf (dumpfile, "Spilling reg %d.\n", regno);
2317 SET_HARD_REG_BIT (chain->used_spill_regs, regno);
08a9dd06 2318
2319 /* Clear off the needs we just satisfied. */
2320
dab171c5 2321 chain->need.regs[0][class]--;
08a9dd06 2322 p = reg_class_superclasses[class];
2323 while (*p != LIM_REG_CLASSES)
dab171c5 2324 chain->need.regs[0][(int) *p++]--;
08a9dd06 2325
dab171c5 2326 if (nongroup && chain->need.regs[1][class] > 0)
08a9dd06 2327 {
dab171c5 2328 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
2329 chain->need.regs[1][class]--;
08a9dd06 2330 p = reg_class_superclasses[class];
2331 while (*p != LIM_REG_CLASSES)
dab171c5 2332 chain->need.regs[1][(int) *p++]--;
08a9dd06 2333 }
2334
08a9dd06 2335 n_spills++;
08a9dd06 2336}
2337\f
 2338/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2339 data that is dead in INSN. */
2340
2341static void
2342delete_dead_insn (insn)
2343 rtx insn;
2344{
2345 rtx prev = prev_real_insn (insn);
2346 rtx prev_dest;
2347
2348 /* If the previous insn sets a register that dies in our insn, delete it
2349 too. */
2350 if (prev && GET_CODE (PATTERN (prev)) == SET
2351 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2352 && reg_mentioned_p (prev_dest, PATTERN (insn))
5d4a682a 2353 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2354 && ! side_effects_p (SET_SRC (PATTERN (prev))))
08a9dd06 2355 delete_dead_insn (prev);
2356
2357 PUT_CODE (insn, NOTE);
2358 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2359 NOTE_SOURCE_FILE (insn) = 0;
2360}
2361
2362/* Modify the home of pseudo-reg I.
2363 The new home is present in reg_renumber[I].
2364
2365 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2366 or it may be -1, meaning there is none or it is not relevant.
2367 This is used so that all pseudos spilled from a given hard reg
2368 can share one stack slot. */
2369
2370static void
2371alter_reg (i, from_reg)
2372 register int i;
2373 int from_reg;
2374{
2375 /* When outputting an inline function, this can happen
2376 for a reg that isn't actually used. */
2377 if (regno_reg_rtx[i] == 0)
2378 return;
2379
2380 /* If the reg got changed to a MEM at rtl-generation time,
2381 ignore it. */
2382 if (GET_CODE (regno_reg_rtx[i]) != REG)
2383 return;
2384
2385 /* Modify the reg-rtx to contain the new hard reg
2386 number or else to contain its pseudo reg number. */
2387 REGNO (regno_reg_rtx[i])
2388 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2389
2390 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2391 allocate a stack slot for it. */
2392
2393 if (reg_renumber[i] < 0
394685a4 2394 && REG_N_REFS (i) > 0
08a9dd06 2395 && reg_equiv_constant[i] == 0
2396 && reg_equiv_memory_loc[i] == 0)
2397 {
2398 register rtx x;
2399 int inherent_size = PSEUDO_REGNO_BYTES (i);
2400 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2401 int adjust = 0;
2402
2403 /* Each pseudo reg has an inherent size which comes from its own mode,
2404 and a total size which provides room for paradoxical subregs
2405 which refer to the pseudo reg in wider modes.
2406
2407 We can use a slot already allocated if it provides both
2408 enough inherent space and enough total space.
2409 Otherwise, we allocate a new slot, making sure that it has no less
 2410	 inherent space, and no less total space, than the previous slot.  */
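	 /* For example (sizes hypothetical): a DImode pseudo that is also
	    referenced through a paradoxical TImode subreg has an inherent
	    size of 8 bytes but a total size of 16 bytes, so any slot used
	    for it must provide at least 16 bytes.  */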
2411 if (from_reg == -1)
2412 {
2413 /* No known place to spill from => no slot to reuse. */
a0516f44 2414 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2415 inherent_size == total_size ? 0 : -1);
51356f86 2416 if (BYTES_BIG_ENDIAN)
04188a85 2417 /* Cancel the big-endian correction done in assign_stack_local.
2418 Get the address of the beginning of the slot.
2419 This is so we can do a big-endian correction unconditionally
2420 below. */
2421 adjust = inherent_size - total_size;
2422
2423 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
08a9dd06 2424 }
2425 /* Reuse a stack slot if possible. */
2426 else if (spill_stack_slot[from_reg] != 0
2427 && spill_stack_slot_width[from_reg] >= total_size
2428 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2429 >= inherent_size))
2430 x = spill_stack_slot[from_reg];
2431 /* Allocate a bigger slot. */
2432 else
2433 {
2434 /* Compute maximum size needed, both for inherent size
2435 and for total size. */
2436 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
56351df3 2437 rtx stack_slot;
08a9dd06 2438 if (spill_stack_slot[from_reg])
2439 {
2440 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2441 > inherent_size)
2442 mode = GET_MODE (spill_stack_slot[from_reg]);
2443 if (spill_stack_slot_width[from_reg] > total_size)
2444 total_size = spill_stack_slot_width[from_reg];
2445 }
2446 /* Make a slot with that size. */
a0516f44 2447 x = assign_stack_local (mode, total_size,
2448 inherent_size == total_size ? 0 : -1);
56351df3 2449 stack_slot = x;
51356f86 2450 if (BYTES_BIG_ENDIAN)
2451 {
2452 /* Cancel the big-endian correction done in assign_stack_local.
2453 Get the address of the beginning of the slot.
2454 This is so we can do a big-endian correction unconditionally
2455 below. */
2456 adjust = GET_MODE_SIZE (mode) - total_size;
56351df3 2457 if (adjust)
941522d6 2458 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2459 * BITS_PER_UNIT,
2460 MODE_INT, 1),
04188a85 2461 plus_constant (XEXP (x, 0), adjust));
51356f86 2462 }
56351df3 2463 spill_stack_slot[from_reg] = stack_slot;
08a9dd06 2464 spill_stack_slot_width[from_reg] = total_size;
2465 }
2466
08a9dd06 2467 /* On a big endian machine, the "address" of the slot
2468 is the address of the low part that fits its inherent mode. */
51356f86 2469 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
08a9dd06 2470 adjust += (total_size - inherent_size);
08a9dd06 2471
2472 /* If we have any adjustment to make, or if the stack slot is the
2473 wrong mode, make a new stack slot. */
2474 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2475 {
941522d6 2476 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
08a9dd06 2477 plus_constant (XEXP (x, 0), adjust));
9e042f31 2478
2479 /* If this was shared among registers, must ensure we never
2480 set it readonly since that can cause scheduling
 2481	     problems.  Note that we would only need to set it in this
 2482	     adjustment case in any event, since the code above doesn't set it. */
2483
2484 if (from_reg == -1)
2485 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
08a9dd06 2486 }
2487
2488 /* Save the stack slot for later. */
2489 reg_equiv_memory_loc[i] = x;
2490 }
2491}
2492
2493/* Mark the slots in regs_ever_live for the hard regs
2494 used by pseudo-reg number REGNO. */
2495
2496void
2497mark_home_live (regno)
2498 int regno;
2499{
2500 register int i, lim;
2501 i = reg_renumber[regno];
2502 if (i < 0)
2503 return;
2504 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2505 while (i < lim)
2506 regs_ever_live[i++] = 1;
2507}
2508\f
2509/* This function handles the tracking of elimination offsets around branches.
2510
2511 X is a piece of RTL being scanned.
2512
2513 INSN is the insn that it came from, if any.
2514
2515 INITIAL_P is non-zero if we are to set the offset to be the initial
2516 offset and zero if we are setting the offset of the label to be the
2517 current offset. */
2518
2519static void
2520set_label_offsets (x, insn, initial_p)
2521 rtx x;
2522 rtx insn;
2523 int initial_p;
2524{
2525 enum rtx_code code = GET_CODE (x);
2526 rtx tem;
274c11d8 2527 unsigned int i;
08a9dd06 2528 struct elim_table *p;
2529
2530 switch (code)
2531 {
2532 case LABEL_REF:
3add3b31 2533 if (LABEL_REF_NONLOCAL_P (x))
2534 return;
2535
08a9dd06 2536 x = XEXP (x, 0);
2537
a92771b8 2538 /* ... fall through ... */
08a9dd06 2539
2540 case CODE_LABEL:
2541 /* If we know nothing about this label, set the desired offsets. Note
2542 that this sets the offset at a label to be the offset before a label
2543 if we don't know anything about the label. This is not correct for
2544 the label after a BARRIER, but is the best guess we can make. If
2545 we guessed wrong, we will suppress an elimination that might have
2546 been possible had we been able to guess correctly. */
2547
2548 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2549 {
2550 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2551 offsets_at[CODE_LABEL_NUMBER (x)][i]
2552 = (initial_p ? reg_eliminate[i].initial_offset
2553 : reg_eliminate[i].offset);
2554 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2555 }
2556
2557 /* Otherwise, if this is the definition of a label and it is
f9e15121 2558 preceded by a BARRIER, set our offsets to the known offset of
08a9dd06 2559 that label. */
2560
2561 else if (x == insn
2562 && (tem = prev_nonnote_insn (insn)) != 0
2563 && GET_CODE (tem) == BARRIER)
f87f6d5d 2564 set_offsets_for_label (insn);
08a9dd06 2565 else
2566 /* If neither of the above cases is true, compare each offset
2567 with those previously recorded and suppress any eliminations
2568 where the offsets disagree. */
c8ad158d 2569
08a9dd06 2570 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2571 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2572 != (initial_p ? reg_eliminate[i].initial_offset
2573 : reg_eliminate[i].offset))
2574 reg_eliminate[i].can_eliminate = 0;
2575
2576 return;
2577
2578 case JUMP_INSN:
2579 set_label_offsets (PATTERN (insn), insn, initial_p);
2580
a92771b8 2581 /* ... fall through ... */
08a9dd06 2582
2583 case INSN:
2584 case CALL_INSN:
2585 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2586 and hence must have all eliminations at their initial offsets. */
2587 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2588 if (REG_NOTE_KIND (tem) == REG_LABEL)
2589 set_label_offsets (XEXP (tem, 0), insn, 1);
2590 return;
2591
2592 case ADDR_VEC:
2593 case ADDR_DIFF_VEC:
2594 /* Each of the labels in the address vector must be at their initial
3398e91d 2595 offsets. We want the first field for ADDR_VEC and the second
08a9dd06 2596 field for ADDR_DIFF_VEC. */
2597
274c11d8 2598 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
08a9dd06 2599 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2600 insn, initial_p);
2601 return;
2602
2603 case SET:
2604 /* We only care about setting PC. If the source is not RETURN,
2605 IF_THEN_ELSE, or a label, disable any eliminations not at
2606 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2607 isn't one of those possibilities. For branches to a label,
2608 call ourselves recursively.
2609
2610 Note that this can disable elimination unnecessarily when we have
2611 a non-local goto since it will look like a non-constant jump to
2612 someplace in the current function. This isn't a significant
2613 problem since such jumps will normally be when all elimination
2614 pairs are back to their initial offsets. */
2615
2616 if (SET_DEST (x) != pc_rtx)
2617 return;
2618
2619 switch (GET_CODE (SET_SRC (x)))
2620 {
2621 case PC:
2622 case RETURN:
2623 return;
2624
2625 case LABEL_REF:
2626 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2627 return;
2628
2629 case IF_THEN_ELSE:
2630 tem = XEXP (SET_SRC (x), 1);
2631 if (GET_CODE (tem) == LABEL_REF)
2632 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2633 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2634 break;
2635
2636 tem = XEXP (SET_SRC (x), 2);
2637 if (GET_CODE (tem) == LABEL_REF)
2638 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2639 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2640 break;
2641 return;
0dbd1c74 2642
2643 default:
2644 break;
08a9dd06 2645 }
2646
2647 /* If we reach here, all eliminations must be at their initial
2648 offset because we are doing a jump to a variable address. */
2649 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2650 if (p->offset != p->initial_offset)
2651 p->can_eliminate = 0;
0dbd1c74 2652 break;
2653
2654 default:
2655 break;
08a9dd06 2656 }
2657}
2658\f
 2659/* Used for communication between the next two functions to properly share
2660 the vector for an ASM_OPERANDS. */
2661
2662static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2663
c8ad158d 2664/* Scan X and replace any eliminable registers (such as fp) with a
08a9dd06 2665 replacement (such as sp), plus an offset.
2666
2667 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2668 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2669 MEM, we are allowed to replace a sum of a register and the constant zero
2670 with the register, which we cannot do outside a MEM. In addition, we need
2671 to record the fact that a register is referenced outside a MEM.
2672
b8b76680 2673 If INSN is an insn, it is the insn containing X. If we replace a REG
08a9dd06 2674 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2675 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
3398e91d 2676 the REG is being modified.
08a9dd06 2677
b8b76680 2678 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2679 That's used when we eliminate in expressions stored in notes.
2680 This means, do not set ref_outside_mem even if the reference
2681 is outside of MEMs.
2682
08a9dd06 2683 If we see a modification to a register we know about, take the
2684 appropriate action (see case SET, below).
2685
 2686   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2687 replacements done assuming all offsets are at their initial values. If
2688 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2689 encounter, return the actual location so that find_reloads will do
2690 the proper thing. */
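/* A worked example (offsets hypothetical): when the frame pointer is being
   eliminated in favor of the stack pointer and the current offset between
   them is 16, a reference to (plus (reg fp) (const_int 8)) inside a MEM is
   rewritten as (plus (reg sp) (const_int 24)).  */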
2691
2692rtx
6182a80f 2693eliminate_regs (x, mem_mode, insn)
08a9dd06 2694 rtx x;
2695 enum machine_mode mem_mode;
2696 rtx insn;
2697{
2698 enum rtx_code code = GET_CODE (x);
2699 struct elim_table *ep;
2700 int regno;
2701 rtx new;
2702 int i, j;
2703 char *fmt;
2704 int copied = 0;
2705
e965e340 2706 if (! current_function_decl)
2707 return x;
cc9c157e 2708
08a9dd06 2709 switch (code)
2710 {
2711 case CONST_INT:
2712 case CONST_DOUBLE:
2713 case CONST:
2714 case SYMBOL_REF:
2715 case CODE_LABEL:
2716 case PC:
2717 case CC0:
2718 case ASM_INPUT:
2719 case ADDR_VEC:
2720 case ADDR_DIFF_VEC:
2721 case RETURN:
2722 return x;
2723
0dbd1c74 2724 case ADDRESSOF:
2725 /* This is only for the benefit of the debugging backends, which call
2726 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2727 removed after CSE. */
6182a80f 2728 new = eliminate_regs (XEXP (x, 0), 0, insn);
0dbd1c74 2729 if (GET_CODE (new) == MEM)
2730 return XEXP (new, 0);
2731 return x;
2732
08a9dd06 2733 case REG:
2734 regno = REGNO (x);
2735
2736 /* First handle the case where we encounter a bare register that
2737 is eliminable. Replace it with a PLUS. */
2738 if (regno < FIRST_PSEUDO_REGISTER)
2739 {
2740 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2741 ep++)
2742 if (ep->from_rtx == x && ep->can_eliminate)
2743 {
b8b76680 2744 if (! mem_mode
2745 /* Refs inside notes don't count for this purpose. */
c72edd49 2746 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
b8b76680 2747 || GET_CODE (insn) == INSN_LIST)))
08a9dd06 2748 ep->ref_outside_mem = 1;
2749 return plus_constant (ep->to_rtx, ep->previous_offset);
2750 }
2751
2752 }
aa8d28af 2753 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2754 && reg_equiv_constant[regno]
2755 && ! CONSTANT_P (reg_equiv_constant[regno]))
2756 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2757 mem_mode, insn);
08a9dd06 2758 return x;
2759
2760 case PLUS:
2761 /* If this is the sum of an eliminable register and a constant, rework
2762 the sum. */
2763 if (GET_CODE (XEXP (x, 0)) == REG
2764 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2765 && CONSTANT_P (XEXP (x, 1)))
2766 {
2767 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2768 ep++)
2769 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2770 {
4fe0a423 2771 if (! mem_mode
2772 /* Refs inside notes don't count for this purpose. */
2773 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2774 || GET_CODE (insn) == INSN_LIST)))
08a9dd06 2775 ep->ref_outside_mem = 1;
2776
2777 /* The only time we want to replace a PLUS with a REG (this
2778 occurs when the constant operand of the PLUS is the negative
2779 of the offset) is when we are inside a MEM. We won't want
2780 to do so at other times because that would change the
2781 structure of the insn in a way that reload can't handle.
2782 We special-case the commonest situation in
2783 eliminate_regs_in_insn, so just replace a PLUS with a
2784 PLUS here, unless inside a MEM. */
e77a9f50 2785 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
08a9dd06 2786 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2787 return ep->to_rtx;
2788 else
941522d6 2789 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2790 plus_constant (XEXP (x, 1),
2791 ep->previous_offset));
08a9dd06 2792 }
2793
2794 /* If the register is not eliminable, we are done since the other
2795 operand is a constant. */
2796 return x;
2797 }
2798
2799 /* If this is part of an address, we want to bring any constant to the
2800 outermost PLUS. We will do this by doing register replacement in
2801 our operands and seeing if a constant shows up in one of them.
2802
2803 We assume here this is part of an address (or a "load address" insn)
2804 since an eliminable register is not likely to appear in any other
2805 context.
2806
2807 If we have (plus (eliminable) (reg)), we want to produce
049f26da 2808     (plus (plus (replacement) (reg)) (const)).  If this was part of a
08a9dd06 2809 normal add insn, (plus (replacement) (reg)) will be pushed as a
2810 reload. This is the desired action. */
2811
2812 {
6182a80f 2813 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2814 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
08a9dd06 2815
2816 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2817 {
2818 /* If one side is a PLUS and the other side is a pseudo that
c8ad158d 2819 didn't get a hard register but has a reg_equiv_constant,
08a9dd06 2820 we must replace the constant here since it may no longer
2821 be in the position of any operand. */
2822 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2823 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2824 && reg_renumber[REGNO (new1)] < 0
2825 && reg_equiv_constant != 0
2826 && reg_equiv_constant[REGNO (new1)] != 0)
2827 new1 = reg_equiv_constant[REGNO (new1)];
2828 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2829 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2830 && reg_renumber[REGNO (new0)] < 0
2831 && reg_equiv_constant[REGNO (new0)] != 0)
2832 new0 = reg_equiv_constant[REGNO (new0)];
2833
2834 new = form_sum (new0, new1);
2835
2836 /* As above, if we are not inside a MEM we do not want to
2837 turn a PLUS into something else. We might try to do so here
2838 for an addition of 0 if we aren't optimizing. */
2839 if (! mem_mode && GET_CODE (new) != PLUS)
941522d6 2840 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
08a9dd06 2841 else
2842 return new;
2843 }
2844 }
2845 return x;
2846
7f24d2b8 2847 case MULT:
2848 /* If this is the product of an eliminable register and a
 2849	 constant, apply the distributive law and move the constant out
2850 so that we have (plus (mult ..) ..). This is needed in order
c3418f42 2851 to keep load-address insns valid. This case is pathological.
7f24d2b8 2852 We ignore the possibility of overflow here. */
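      /* For instance (offset hypothetical): eliminating the frame pointer
	 to the stack pointer at offset 16 turns (mult (reg fp) (const_int 4))
	 into (plus (mult (reg sp) (const_int 4)) (const_int 64)).  */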
2853 if (GET_CODE (XEXP (x, 0)) == REG
2854 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2855 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2856 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2857 ep++)
2858 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2859 {
2860 if (! mem_mode
2861 /* Refs inside notes don't count for this purpose. */
2862 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2863 || GET_CODE (insn) == INSN_LIST)))
2864 ep->ref_outside_mem = 1;
2865
2866 return
941522d6 2867 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
7f24d2b8 2868 ep->previous_offset * INTVAL (XEXP (x, 1)));
2869 }
08a9dd06 2870
a92771b8 2871 /* ... fall through ... */
08a9dd06 2872
08a9dd06 2873 case CALL:
2874 case COMPARE:
049f26da 2875 case MINUS:
08a9dd06 2876 case DIV: case UDIV:
2877 case MOD: case UMOD:
2878 case AND: case IOR: case XOR:
6503f782 2879 case ROTATERT: case ROTATE:
2880 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
08a9dd06 2881 case NE: case EQ:
2882 case GE: case GT: case GEU: case GTU:
2883 case LE: case LT: case LEU: case LTU:
2884 {
6182a80f 2885 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
e5fdd564 2886 rtx new1
6182a80f 2887 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
08a9dd06 2888
2889 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
941522d6 2890 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
08a9dd06 2891 }
2892 return x;
2893
7f24d2b8 2894 case EXPR_LIST:
2895 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2896 if (XEXP (x, 0))
2897 {
6182a80f 2898 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
7f24d2b8 2899 if (new != XEXP (x, 0))
941522d6 2900 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
7f24d2b8 2901 }
2902
a92771b8 2903 /* ... fall through ... */
7f24d2b8 2904
2905 case INSN_LIST:
2906 /* Now do eliminations in the rest of the chain. If this was
2907 an EXPR_LIST, this might result in allocating more memory than is
2908 strictly needed, but it simplifies the code. */
2909 if (XEXP (x, 1))
2910 {
6182a80f 2911 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
7f24d2b8 2912 if (new != XEXP (x, 1))
941522d6 2913 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
7f24d2b8 2914 }
2915 return x;
2916
08a9dd06 2917 case PRE_INC:
2918 case POST_INC:
2919 case PRE_DEC:
2920 case POST_DEC:
2921 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2922 if (ep->to_rtx == XEXP (x, 0))
2923 {
f5a2f752 2924 int size = GET_MODE_SIZE (mem_mode);
2925
2926 /* If more bytes than MEM_MODE are pushed, account for them. */
2927#ifdef PUSH_ROUNDING
2928 if (ep->to_rtx == stack_pointer_rtx)
2929 size = PUSH_ROUNDING (size);
2930#endif
08a9dd06 2931 if (code == PRE_DEC || code == POST_DEC)
f5a2f752 2932 ep->offset += size;
08a9dd06 2933 else
f5a2f752 2934 ep->offset -= size;
08a9dd06 2935 }
2936
2937 /* Fall through to generic unary operation case. */
08a9dd06 2938 case STRICT_LOW_PART:
2939 case NEG: case NOT:
2940 case SIGN_EXTEND: case ZERO_EXTEND:
2941 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2942 case FLOAT: case FIX:
2943 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2944 case ABS:
2945 case SQRT:
2946 case FFS:
6182a80f 2947 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
08a9dd06 2948 if (new != XEXP (x, 0))
941522d6 2949 return gen_rtx_fmt_e (code, GET_MODE (x), new);
08a9dd06 2950 return x;
2951
2952 case SUBREG:
2953 /* Similar to above processing, but preserve SUBREG_WORD.
2954 Convert (subreg (mem)) to (mem) if not paradoxical.
2955 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2956 pseudo didn't get a hard reg, we must replace this with the
2957 eliminated version of the memory location because push_reloads
2958 may do the replacement in certain circumstances. */
2959 if (GET_CODE (SUBREG_REG (x)) == REG
2960 && (GET_MODE_SIZE (GET_MODE (x))
2961 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2962 && reg_equiv_memory_loc != 0
2963 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2964 {
93c7b06e 2965#if 0
08a9dd06 2966 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
6182a80f 2967 mem_mode, insn);
08a9dd06 2968
2969 /* If we didn't change anything, we must retain the pseudo. */
2970 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
f7ceaab8 2971 new = SUBREG_REG (x);
08a9dd06 2972 else
f7ceaab8 2973 {
f7ceaab8 2974 /* In this case, we must show that the pseudo is used in this
2975 insn so that delete_output_reload will do the right thing. */
2976 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2977 && GET_CODE (insn) != INSN_LIST)
6aed5e78 2978 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
2979 SUBREG_REG (x)),
2980 insn))
2981 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2982
2983 /* Ensure NEW isn't shared in case we have to reload it. */
2984 new = copy_rtx (new);
f7ceaab8 2985 }
93c7b06e 2986#else
2987 new = SUBREG_REG (x);
2988#endif
08a9dd06 2989 }
2990 else
6182a80f 2991 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
08a9dd06 2992
2993 if (new != XEXP (x, 0))
2994 {
e87a9401 2995 int x_size = GET_MODE_SIZE (GET_MODE (x));
2996 int new_size = GET_MODE_SIZE (GET_MODE (new));
2997
6182a80f 2998 if (GET_CODE (new) == MEM
ebbb4987 2999 && ((x_size < new_size
6182a80f 3000#ifdef WORD_REGISTER_OPERATIONS
ebbb4987 3001 /* On these machines, combine can create rtl of the form
3002 (set (subreg:m1 (reg:m2 R) 0) ...)
3003 where m1 < m2, and expects something interesting to
3004 happen to the entire word. Moreover, it will use the
3005 (reg:m2 R) later, expecting all bits to be preserved.
3006 So if the number of words is the same, preserve the
3007 subreg so that push_reloads can see it. */
3008 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
6182a80f 3009#endif
ebbb4987 3010 )
3011 || (x_size == new_size))
6182a80f 3012 )
08a9dd06 3013 {
3014 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3015 enum machine_mode mode = GET_MODE (x);
3016
51356f86 3017 if (BYTES_BIG_ENDIAN)
3018 offset += (MIN (UNITS_PER_WORD,
3019 GET_MODE_SIZE (GET_MODE (new)))
3020 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
08a9dd06 3021
3022 PUT_MODE (new, mode);
3023 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3024 return new;
3025 }
3026 else
941522d6 3027 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
08a9dd06 3028 }
3029
3030 return x;
3031
d2fd839d 3032 case USE:
 3033    /* If using a register that is the source of an elimination we still
3034 think can be performed, note it cannot be performed since we don't
3035 know how this register is used. */
3036 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3037 if (ep->from_rtx == XEXP (x, 0))
3038 ep->can_eliminate = 0;
3039
6182a80f 3040 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
d2fd839d 3041 if (new != XEXP (x, 0))
941522d6 3042 return gen_rtx_fmt_e (code, GET_MODE (x), new);
d2fd839d 3043 return x;
3044
08a9dd06 3045 case CLOBBER:
3046 /* If clobbering a register that is the replacement register for an
f9e15121 3047 elimination we still think can be performed, note that it cannot
08a9dd06 3048 be performed. Otherwise, we need not be concerned about it. */
3049 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3050 if (ep->to_rtx == XEXP (x, 0))
3051 ep->can_eliminate = 0;
3052
6182a80f 3053 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
97a9c37d 3054 if (new != XEXP (x, 0))
941522d6 3055 return gen_rtx_fmt_e (code, GET_MODE (x), new);
08a9dd06 3056 return x;
3057
3058 case ASM_OPERANDS:
3059 {
3060 rtx *temp_vec;
3061 /* Properly handle sharing input and constraint vectors. */
3062 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3063 {
3064 /* When we come to a new vector not seen before,
3065 scan all its elements; keep the old vector if none
3066 of them changes; otherwise, make a copy. */
3067 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3068 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3069 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3070 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
6182a80f 3071 mem_mode, insn);
08a9dd06 3072
3073 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3074 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3075 break;
3076
3077 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3078 new_asm_operands_vec = old_asm_operands_vec;
3079 else
3080 new_asm_operands_vec
3081 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3082 }
3083
3084 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3085 if (new_asm_operands_vec == old_asm_operands_vec)
3086 return x;
3087
941522d6 3088 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3089 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3090 ASM_OPERANDS_OUTPUT_IDX (x),
3091 new_asm_operands_vec,
3092 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3093 ASM_OPERANDS_SOURCE_FILE (x),
3094 ASM_OPERANDS_SOURCE_LINE (x));
08a9dd06 3095 new->volatil = x->volatil;
3096 return new;
3097 }
3098
3099 case SET:
3100 /* Check for setting a register that we know about. */
3101 if (GET_CODE (SET_DEST (x)) == REG)
3102 {
3103 /* See if this is setting the replacement register for an
c8ad158d 3104 elimination.
08a9dd06 3105
ecbd56da 3106 If DEST is the hard frame pointer, we do nothing because we
3107 assume that all assignments to the hard frame pointer are for
3108 non-local gotos and are being done at a time when they are valid
3109 and do not disturb anything else. Some machines want to
3110 eliminate a fake argument pointer (or even a fake frame pointer)
3111 with either the real frame or the stack pointer. Assignments to
3112 the hard frame pointer must not prevent this elimination. */
08a9dd06 3113
3114 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3115 ep++)
3116 if (ep->to_rtx == SET_DEST (x)
ecbd56da 3117 && SET_DEST (x) != hard_frame_pointer_rtx)
08a9dd06 3118 {
4bbea254 3119 /* If it is being incremented, adjust the offset. Otherwise,
08a9dd06 3120 this elimination can't be done. */
3121 rtx src = SET_SRC (x);
3122
3123 if (GET_CODE (src) == PLUS
3124 && XEXP (src, 0) == SET_DEST (x)
3125 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3126 ep->offset -= INTVAL (XEXP (src, 1));
3127 else
3128 ep->can_eliminate = 0;
3129 }
3130
3131 /* Now check to see if we are assigning to a register that can be
3132 eliminated. If so, it must be as part of a PARALLEL, since we
3133 will not have been called if this is a single SET. So indicate
3134 that we can no longer eliminate this reg. */
3135 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3136 ep++)
3137 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3138 ep->can_eliminate = 0;
3139 }
3140
3141 /* Now avoid the loop below in this common case. */
3142 {
6182a80f 3143 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3144 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
08a9dd06 3145
b8b76680 3146 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
08a9dd06 3147 write a CLOBBER insn. */
3148 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
f8898b80 3149 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3150 && GET_CODE (insn) != INSN_LIST)
941522d6 3151 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
08a9dd06 3152
3153 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
941522d6 3154 return gen_rtx_SET (VOIDmode, new0, new1);
08a9dd06 3155 }
3156
3157 return x;
3158
3159 case MEM:
0dbd1c74 3160 /* This is only for the benefit of the debugging backends, which call
3161 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3162 removed after CSE. */
3163 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
6182a80f 3164 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
0dbd1c74 3165
08a9dd06 3166 /* Our only special processing is to pass the mode of the MEM to our
3167 recursive call and copy the flags. While we are here, handle this
3168 case more efficiently. */
6182a80f 3169 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
08a9dd06 3170 if (new != XEXP (x, 0))
3171 {
941522d6 3172 new = gen_rtx_MEM (GET_MODE (x), new);
08a9dd06 3173 new->volatil = x->volatil;
3174 new->unchanging = x->unchanging;
3175 new->in_struct = x->in_struct;
3176 return new;
3177 }
3178 else
3179 return x;
0dbd1c74 3180
3181 default:
3182 break;
08a9dd06 3183 }
3184
3185 /* Process each of our operands recursively. If any have changed, make a
3186 copy of the rtx. */
3187 fmt = GET_RTX_FORMAT (code);
3188 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3189 {
3190 if (*fmt == 'e')
3191 {
6182a80f 3192 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
08a9dd06 3193 if (new != XEXP (x, i) && ! copied)
3194 {
3195 rtx new_x = rtx_alloc (code);
748e6d74 3196 bcopy ((char *) x, (char *) new_x,
3197 (sizeof (*new_x) - sizeof (new_x->fld)
3198 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
08a9dd06 3199 x = new_x;
3200 copied = 1;
3201 }
3202 XEXP (x, i) = new;
3203 }
3204 else if (*fmt == 'E')
3205 {
3206 int copied_vec = 0;
3207 for (j = 0; j < XVECLEN (x, i); j++)
3208 {
6182a80f 3209 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
08a9dd06 3210 if (new != XVECEXP (x, i, j) && ! copied_vec)
3211 {
55b5c765 3212 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3213 XVEC (x, i)->elem);
08a9dd06 3214 if (! copied)
3215 {
3216 rtx new_x = rtx_alloc (code);
748e6d74 3217 bcopy ((char *) x, (char *) new_x,
3218 (sizeof (*new_x) - sizeof (new_x->fld)
3219 + (sizeof (new_x->fld[0])
3220 * GET_RTX_LENGTH (code))));
08a9dd06 3221 x = new_x;
3222 copied = 1;
3223 }
3224 XVEC (x, i) = new_v;
3225 copied_vec = 1;
3226 }
3227 XVECEXP (x, i, j) = new;
3228 }
3229 }
3230 }
3231
3232 return x;
3233}
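/* A worked illustration of the rewriting done above (hypothetical target and
   offsets, kept out of compilation): suppose the frame pointer is being
   eliminated to the stack pointer and the offset currently in effect is 16.
   Then a reference such as

       (mem:SI (plus:SI (reg fp) (const_int 8)))

   comes back from eliminate_regs as

       (mem:SI (plus:SI (reg sp) (const_int 24)))

   The sketch below shows only the constant-term arithmetic; the cases above
   additionally copy MEM flags, preserve SUBREG_WORD, handle ASM_OPERANDS
   sharing, and so on.  The helper name is hypothetical.  */
#if 0
static rtx
eliminate_plus_sketch (ep, cst, offset)
     struct elim_table *ep;
     HOST_WIDE_INT cst, offset;
{
  /* (plus FROM cst) becomes (plus TO (cst + offset)), where OFFSET is the
     offset currently in effect for the elimination described by EP.  */
  return plus_constant (ep->to_rtx, cst + offset);
}
#endif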
3234\f
3235/* Scan INSN and eliminate all eliminable registers in it.
3236
3237 If REPLACE is nonzero, do the replacement destructively. Also
3238 delete the insn as dead if it is setting an eliminable register.
3239
3240 If REPLACE is zero, do all our allocations in reload_obstack.
3241
3242 If no eliminations were done and this insn doesn't require any elimination
3243 processing (these are not identical conditions: it might be updating sp,
3244 but not referencing fp; this needs to be seen during reload_as_needed so
3245 that the offset between fp and sp can be taken into consideration), zero
3246 is returned. Otherwise, 1 is returned. */
3247
3248static int
3249eliminate_regs_in_insn (insn, replace)
3250 rtx insn;
3251 int replace;
3252{
3253 rtx old_body = PATTERN (insn);
d92d1db8 3254 rtx old_set = single_set (insn);
08a9dd06 3255 rtx new_body;
3256 int val = 0;
3257 struct elim_table *ep;
3258
3259 if (! replace)
3260 push_obstacks (&reload_obstack, &reload_obstack);
3261
d92d1db8 3262 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3263 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
08a9dd06 3264 {
3265 /* Check for setting an eliminable register. */
3266 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
d92d1db8 3267 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
08a9dd06 3268 {
14d6e77b 3269#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3270 /* If this is setting the frame pointer register to the
3271 hardware frame pointer register and this is an elimination
3272 that will be done (tested above), this insn is really
3273 adjusting the frame pointer downward to compensate for
3274 the adjustment done before a nonlocal goto. */
3275 if (ep->from == FRAME_POINTER_REGNUM
3276 && ep->to == HARD_FRAME_POINTER_REGNUM)
3277 {
3278 rtx src = SET_SRC (old_set);
3c1d7436 3279 int offset = 0, ok = 0;
ec6d6e82 3280 rtx prev_insn, prev_set;
14d6e77b 3281
3282 if (src == ep->to_rtx)
3283 offset = 0, ok = 1;
3284 else if (GET_CODE (src) == PLUS
a46ae5f9 3285 && GET_CODE (XEXP (src, 0)) == CONST_INT
3286 && XEXP (src, 1) == ep->to_rtx)
14d6e77b 3287 offset = INTVAL (XEXP (src, 0)), ok = 1;
a46ae5f9 3288 else if (GET_CODE (src) == PLUS
3289 && GET_CODE (XEXP (src, 1)) == CONST_INT
3290 && XEXP (src, 0) == ep->to_rtx)
3291 offset = INTVAL (XEXP (src, 1)), ok = 1;
ec6d6e82 3292 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3293 && (prev_set = single_set (prev_insn)) != 0
3294 && rtx_equal_p (SET_DEST (prev_set), src))
3295 {
3296 src = SET_SRC (prev_set);
3297 if (src == ep->to_rtx)
3298 offset = 0, ok = 1;
3299 else if (GET_CODE (src) == PLUS
3300 && GET_CODE (XEXP (src, 0)) == CONST_INT
3301 && XEXP (src, 1) == ep->to_rtx)
3302 offset = INTVAL (XEXP (src, 0)), ok = 1;
3303 else if (GET_CODE (src) == PLUS
3304 && GET_CODE (XEXP (src, 1)) == CONST_INT
3305 && XEXP (src, 0) == ep->to_rtx)
3306 offset = INTVAL (XEXP (src, 1)), ok = 1;
3307 }
14d6e77b 3308
3309 if (ok)
3310 {
3311 if (replace)
3312 {
3313 rtx src
3314 = plus_constant (ep->to_rtx, offset - ep->offset);
3315
3316 /* First see if this insn remains valid when we
3317 make the change. If not, keep the INSN_CODE
3318 the same and let reload fix it up. */
3319 validate_change (insn, &SET_SRC (old_set), src, 1);
3320 validate_change (insn, &SET_DEST (old_set),
3321 ep->to_rtx, 1);
3322 if (! apply_change_group ())
3323 {
3324 SET_SRC (old_set) = src;
3325 SET_DEST (old_set) = ep->to_rtx;
3326 }
3327 }
3328
3329 val = 1;
3330 goto done;
3331 }
3332 }
3333#endif
3334
08a9dd06 3335 /* In this case this insn isn't serving a useful purpose. We
3336 will delete it in reload_as_needed once we know that this
3337 elimination is, in fact, being done.
3338
b090827b 3339 If REPLACE isn't set, we can't delete this insn, but needn't
08a9dd06 3340 process it since it won't be used unless something changes. */
3341 if (replace)
3342 delete_dead_insn (insn);
3343 val = 1;
3344 goto done;
3345 }
3346
3347 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3348 in the insn is the negative of the offset in FROM. Substitute
3349 (set (reg) (reg to)) for the insn and change its code.
3350
93c7b06e 3351 We have to do this here, rather than in eliminate_regs, so that we can
08a9dd06 3352 change the insn code. */
3353
d92d1db8 3354 if (GET_CODE (SET_SRC (old_set)) == PLUS
3355 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3356 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
08a9dd06 3357 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3358 ep++)
d92d1db8 3359 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
4e14202a 3360 && ep->can_eliminate)
08a9dd06 3361 {
4e14202a 3362 /* We must stop at the first elimination that will be used.
3363 If this one would replace the PLUS with a REG, do it
3364 now. Otherwise, quit the loop and let eliminate_regs
3365 do its normal replacement. */
d92d1db8 3366 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
4e14202a 3367 {
d92d1db8 3368 /* We assume here that we don't need a PARALLEL of
3369 any CLOBBERs for this assignment. There's not
3370 much we can do if we do need it. */
941522d6 3371 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3372 SET_DEST (old_set),
3373 ep->to_rtx);
4e14202a 3374 INSN_CODE (insn) = -1;
3375 val = 1;
3376 goto done;
3377 }
3378
3379 break;
08a9dd06 3380 }
3381 }
3382
3383 old_asm_operands_vec = 0;
3384
3385 /* Replace the body of this insn with a substituted form. If we changed
afca26e1 3386 something, return non-zero.
08a9dd06 3387
3388 If we are replacing a body that was a (set X (plus Y Z)), try to
3389 re-recognize the insn. We do this in case we had a simple addition
3390 but now can do this as a load-address. This saves an insn in this
a92771b8 3391 common case. */
08a9dd06 3392
6182a80f 3393 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
08a9dd06 3394 if (new_body != old_body)
3395 {
721e78c7 3396 /* If we aren't replacing things permanently and we changed something,
3397 make another copy to ensure that all the RTL is new. Otherwise
3398 things can go wrong if find_reloads swaps commutative operands
a92771b8 3399 and one is inside RTL that has been copied while the other is not. */
721e78c7 3400
7323cc19 3401 /* Don't copy an asm_operands because (1) there's no need and (2)
3402 copy_rtx can't do it properly when there are multiple outputs. */
df70356d 3403 if (! replace && asm_noperands (old_body) < 0)
721e78c7 3404 new_body = copy_rtx (new_body);
3405
d92d1db8 3406 /* If we had a move insn but now we don't, rerecognize it. This will
3407 cause spurious re-recognition if the old move had a PARALLEL since
3408 the new one still will, but we can't call single_set without
3409 having put NEW_BODY into the insn, and the re-recognition won't
3410 hurt in this rare case. */
3411 if (old_set != 0
3412 && ((GET_CODE (SET_SRC (old_set)) == REG
3413 && (GET_CODE (new_body) != SET
3414 || GET_CODE (SET_SRC (new_body)) != REG))
3415 /* If this was a load from or store to memory, compare
3416 the MEM in recog_operand to the one in the insn. If they
3417 are not equal, then rerecognize the insn. */
3418 || (old_set != 0
3419 && ((GET_CODE (SET_SRC (old_set)) == MEM
3420 && SET_SRC (old_set) != recog_operand[1])
3421 || (GET_CODE (SET_DEST (old_set)) == MEM
3422 && SET_DEST (old_set) != recog_operand[0])))
3423 /* If this was an add insn before, rerecognize. */
3424 || GET_CODE (SET_SRC (old_set)) == PLUS))
eb77fc03 3425 {
3426 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
5be4b488 3427 /* If recognition fails, store the new body anyway.
3428 It's normal to have recognition failures here
3429 due to bizarre memory addresses; reloading will fix them. */
3430 PATTERN (insn) = new_body;
eb77fc03 3431 }
5be4b488 3432 else
08a9dd06 3433 PATTERN (insn) = new_body;
3434
08a9dd06 3435 val = 1;
3436 }
c8ad158d 3437
93c7b06e 3438 /* Loop through all elimination pairs. See if any have changed.
ca9902fc 3439
08a9dd06 3440 We also detect cases where register elimination cannot be done,
3441 namely, if a register would be both changed and referenced outside a MEM
3442 in the resulting insn since such an insn is often undefined and, even if
3443 not, we cannot know what meaning will be given to it. Note that it is
3444 valid to have a register used in an address in an insn that changes it
3445 (presumably with a pre- or post-increment or decrement).
3446
3447 If anything changes, return nonzero. */
3448
08a9dd06 3449 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3450 {
3451 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3452 ep->can_eliminate = 0;
3453
3454 ep->ref_outside_mem = 0;
3455
3456 if (ep->previous_offset != ep->offset)
3457 val = 1;
08a9dd06 3458 }
3459
3460 done:
c3418f42 3461 /* If we changed something, perform elimination in REG_NOTES. This is
afca26e1 3462 needed even when REPLACE is zero because a REG_DEAD note might refer
3463 to a register that we eliminate and could cause a different number
3464 of spill registers to be needed in the final reload pass than in
3465 the pre-passes. */
1d5f8a5b 3466 if (val && REG_NOTES (insn) != 0)
6182a80f 3467 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
afca26e1 3468
08a9dd06 3469 if (! replace)
3470 pop_obstacks ();
3471
3472 return val;
3473}
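/* Illustration of the PLUS-to-move rewrite above (hypothetical offsets, kept
   out of compilation): with an elimination of fp to sp whose current offset
   is 16, an insn of the form

       (set (reg r) (plus (reg fp) (const_int -16)))

   computes exactly the value of sp, so the code above replaces the pattern
   with

       (set (reg r) (reg sp))

   and resets INSN_CODE so the insn is re-recognized.  The test that decides
   this is sketched below; the helper name is hypothetical.  */
#if 0
static int
plus_becomes_move_p (ep, src)
     struct elim_table *ep;
     rtx src;
{
  /* SRC is (plus (reg FROM) (const_int C)); the rewrite applies only when
     C is the negative of the current elimination offset.  */
  return ep->offset == - INTVAL (XEXP (src, 1));
}
#endif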
3474
93c7b06e 3475/* Loop through all elimination pairs.
3476 Recalculate the number not at initial offset.
3477
3478 Compute the maximum offset (minimum offset if the stack does not
3479 grow downward) for each elimination pair. */
3480
3481static void
3482update_eliminable_offsets ()
3483{
3484 struct elim_table *ep;
3485
3486 num_not_at_initial_offset = 0;
3487 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3488 {
3489 ep->previous_offset = ep->offset;
3490 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3491 num_not_at_initial_offset++;
93c7b06e 3492 }
3493}
3494
08a9dd06 3495/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3496 replacement we currently believe is valid, mark it as not eliminable if X
3497 modifies DEST in any way other than by adding a constant integer to it.
3498
3499 If DEST is the hard frame pointer, we do nothing because we assume that
ecbd56da 3500 all assignments to the hard frame pointer are nonlocal gotos and are being
3501 done at a time when they are valid and do not disturb anything else.
08a9dd06 3502 Some machines want to eliminate a fake argument pointer with either the
ecbd56da 3503 frame or stack pointer. Assignments to the hard frame pointer must not
3504 prevent this elimination.
08a9dd06 3505
3506 Called via note_stores from reload before starting its passes to scan
3507 the insns of the function. */
3508
3509static void
3510mark_not_eliminable (dest, x)
3511 rtx dest;
3512 rtx x;
3513{
274c11d8 3514 register unsigned int i;
08a9dd06 3515
3516 /* A SUBREG of a hard register here is just changing its mode. We should
3517 not see a SUBREG of an eliminable hard register, but check just in
3518 case. */
3519 if (GET_CODE (dest) == SUBREG)
3520 dest = SUBREG_REG (dest);
3521
ecbd56da 3522 if (dest == hard_frame_pointer_rtx)
08a9dd06 3523 return;
3524
3525 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3526 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3527 && (GET_CODE (x) != SET
3528 || GET_CODE (SET_SRC (x)) != PLUS
3529 || XEXP (SET_SRC (x), 0) != dest
3530 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3531 {
3532 reg_eliminate[i].can_eliminate_previous
3533 = reg_eliminate[i].can_eliminate = 0;
3534 num_eliminable--;
3535 }
3536}
9d24e570 3537
8ad9ded8 3538/* Verify that the initial elimination offsets did not change since the
3539 last call to set_initial_elim_offsets. This is used to catch cases
3540 where something illegal happened during reload_as_needed that could
3541 cause incorrect code to be generated if we did not check for it. */
3542static void
3543verify_initial_elim_offsets ()
3544{
3545 int t;
3546
3547#ifdef ELIMINABLE_REGS
3548 struct elim_table *ep;
3549
3550 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3551 {
3552 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3553 if (t != ep->initial_offset)
3554 abort ();
3555 }
3556#else
3557 INITIAL_FRAME_POINTER_OFFSET (t);
3558 if (t != reg_eliminate[0].initial_offset)
3559 abort ();
3560#endif
3561}
3562
9d24e570 3563/* Reset all offsets on eliminable registers to their initial values. */
3564static void
3565set_initial_elim_offsets ()
3566{
f87f6d5d 3567 struct elim_table *ep = reg_eliminate;
9d24e570 3568
3569#ifdef ELIMINABLE_REGS
f87f6d5d 3570 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
9d24e570 3571 {
3572 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
f87f6d5d 3573 ep->previous_offset = ep->offset = ep->initial_offset;
9d24e570 3574 }
3575#else
f87f6d5d 3576 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3577 ep->previous_offset = ep->offset = ep->initial_offset;
9d24e570 3578#endif
3579
3580 num_not_at_initial_offset = 0;
f87f6d5d 3581}
9d24e570 3582
f87f6d5d 3583/* Initialize the known label offsets.
3584 Set a known offset for each forced label to be at the initial offset
3585 of each elimination. We do this because we assume that all
3586 computed jumps occur from a location where each elimination is
3587 at its initial offset.
3588 For all other labels, show that we don't know the offsets. */
9d24e570 3589
f87f6d5d 3590static void
3591set_initial_label_offsets ()
3592{
3593 rtx x;
3594 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
9d24e570 3595
3596 for (x = forced_labels; x; x = XEXP (x, 1))
3597 if (XEXP (x, 0))
3598 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3599}
3600
f87f6d5d 3601/* Set all elimination offsets to the known values for the code label given
3602 by INSN. */
3603static void
3604set_offsets_for_label (insn)
3605 rtx insn;
3606{
3c1d7436 3607 unsigned int i;
f87f6d5d 3608 int label_nr = CODE_LABEL_NUMBER (insn);
3609 struct elim_table *ep;
3610
3611 num_not_at_initial_offset = 0;
3612 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3613 {
3614 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3615 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3616 num_not_at_initial_offset++;
3617 }
3618}
3619
9d24e570 3620/* See if anything that happened changes which eliminations are valid.
3621 For example, on the Sparc, whether or not the frame pointer can
3622 be eliminated can depend on what registers have been used. We need
3623 not check some conditions again (such as flag_omit_frame_pointer)
3624 since they can't have changed. */
3625
3626static void
3627update_eliminables (pset)
3628 HARD_REG_SET *pset;
3629{
3630#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3631 int previous_frame_pointer_needed = frame_pointer_needed;
3632#endif
3633 struct elim_table *ep;
3634
3635 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3636 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3637#ifdef ELIMINABLE_REGS
3638 || ! CAN_ELIMINATE (ep->from, ep->to)
3639#endif
3640 )
3641 ep->can_eliminate = 0;
3642
3643 /* Look for the case where we have discovered that we can't replace
3644 register A with register B and that means that we will now be
3645 trying to replace register A with register C. This means we can
3646 no longer replace register C with register B and we need to disable
3647 such an elimination, if it exists. This occurs often with A == ap,
3648 B == sp, and C == fp. */
3649
3650 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3651 {
3652 struct elim_table *op;
3653 register int new_to = -1;
3654
3655 if (! ep->can_eliminate && ep->can_eliminate_previous)
3656 {
3657 /* Find the current elimination for ep->from, if there is a
3658 new one. */
3659 for (op = reg_eliminate;
3660 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3661 if (op->from == ep->from && op->can_eliminate)
3662 {
3663 new_to = op->to;
3664 break;
3665 }
3666
3667 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3668 disable it. */
3669 for (op = reg_eliminate;
3670 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3671 if (op->from == new_to && op->to == ep->to)
3672 op->can_eliminate = 0;
3673 }
3674 }
3675
3676 /* See if any registers that we thought we could eliminate the previous
3677 time are no longer eliminable. If so, something has changed and we
3678 must spill the register. Also, recompute the number of eliminable
3679 registers and see if the frame pointer is needed; it is if there is
3680 no elimination of the frame pointer that we can perform. */
3681
3682 frame_pointer_needed = 1;
3683 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3684 {
3685 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3686 && ep->to != HARD_FRAME_POINTER_REGNUM)
3687 frame_pointer_needed = 0;
3688
3689 if (! ep->can_eliminate && ep->can_eliminate_previous)
3690 {
3691 ep->can_eliminate_previous = 0;
3692 SET_HARD_REG_BIT (*pset, ep->from);
3693 num_eliminable--;
3694 }
3695 }
3696
3697#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3698 /* If we didn't need a frame pointer last time, but we do now, spill
3699 the hard frame pointer. */
3700 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3701 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3702#endif
3703}
3704
3705/* Initialize the table of registers to eliminate. */
3706static void
3707init_elim_table ()
3708{
3709 struct elim_table *ep;
911d0ac2 3710#ifdef ELIMINABLE_REGS
3711 struct elim_table_1 *ep1;
3712#endif
9d24e570 3713
911d0ac2 3714 if (!reg_eliminate)
3715 {
3716 reg_eliminate = (struct elim_table *)
3717 xmalloc (sizeof (struct elim_table) * NUM_ELIMINABLE_REGS);
3718 bzero ((PTR) reg_eliminate,
3719 sizeof (struct elim_table) * NUM_ELIMINABLE_REGS);
3720 }
3721
9d24e570 3722 /* Does this function require a frame pointer? */
3723
3724 frame_pointer_needed = (! flag_omit_frame_pointer
3725#ifdef EXIT_IGNORE_STACK
3726 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3727 and restore sp for alloca. So we can't eliminate
3728 the frame pointer in that case. At some point,
3729 we should improve this by emitting the
3730 sp-adjusting insns for this case. */
3731 || (current_function_calls_alloca
3732 && EXIT_IGNORE_STACK)
3733#endif
3734 || FRAME_POINTER_REQUIRED);
3735
3736 num_eliminable = 0;
3737
3738#ifdef ELIMINABLE_REGS
911d0ac2 3739 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3740 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
9d24e570 3741 {
911d0ac2 3742 ep->from = ep1->from;
3743 ep->to = ep1->to;
9d24e570 3744 ep->can_eliminate = ep->can_eliminate_previous
3745 = (CAN_ELIMINATE (ep->from, ep->to)
3746 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3747 }
3748#else
911d0ac2 3749 reg_eliminate[0].from = reg_eliminate_1[0].from;
3750 reg_eliminate[0].to = reg_eliminate_1[0].to;
9d24e570 3751 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3752 = ! frame_pointer_needed;
3753#endif
3754
3755 /* Count the number of eliminable registers and build the FROM and TO
3756 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3757 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3758 We depend on this. */
3759 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3760 {
3761 num_eliminable += ep->can_eliminate;
3762 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3763 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3764 }
3765}
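/* For reference, ELIMINABLE_REGS, CAN_ELIMINATE and INITIAL_ELIMINATION_OFFSET
   come from the target description.  A typical, simplified definition might
   look like the following (kept out of compilation; any given port differs in
   detail, and the offset computation shown is a hypothetical helper).  */
#if 0
#define ELIMINABLE_REGS                                        \
{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },               \
 { ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM },               \
 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

#define CAN_ELIMINATE(FROM, TO)                                \
  ((TO) == STACK_POINTER_REGNUM ? ! frame_pointer_needed : 1)

#define INITIAL_ELIMINATION_OFFSET(FROM, TO, OFFSET)           \
  (OFFSET) = port_initial_elimination_offset (FROM, TO)
#endif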
08a9dd06 3766\f
3767/* Kick all pseudos out of hard register REGNO.
08a9dd06 3768 If DUMPFILE is nonzero, log actions taken on that file.
3769
3770 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3771 because we found we can't eliminate some register. In that case, no pseudos
3772 are allowed to be in the register, even if they are only in a block that
3773 doesn't require spill registers, unlike the case when we are spilling this
3774 hard reg to produce another spill register.
3775
3776 Pseudos that must be kicked out are recorded in spilled_pseudos. */
3777
dab171c5 3778static void
3779spill_hard_reg (regno, dumpfile, cant_eliminate)
08a9dd06 3780 register int regno;
08a9dd06 3781 FILE *dumpfile;
3782 int cant_eliminate;
3783{
08a9dd06 3784 register int i;
3785
0394c2e6 3786 if (cant_eliminate)
dab171c5 3787 {
3788 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3789 regs_ever_live[regno] = 1;
3790 }
0394c2e6 3791
08a9dd06 3792 /* Spill every pseudo reg that was allocated to this reg
3793 or to something that overlaps this reg. */
3794
3795 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3796 if (reg_renumber[i] >= 0
3797 && reg_renumber[i] <= regno
c8ad158d 3798 && (reg_renumber[i]
08a9dd06 3799 + HARD_REGNO_NREGS (reg_renumber[i],
3800 PSEUDO_REGNO_MODE (i))
3801 > regno))
dab171c5 3802 SET_REGNO_REG_SET (spilled_pseudos, i);
3803}
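/* Example of the overlap test above (hypothetical numbers): if pseudo I is
   allocated to hard register 4 in a mode for which HARD_REGNO_NREGS (4, mode)
   is 2, it occupies hard registers 4 and 5, so spilling either regno 4 or
   regno 5 adds I to spilled_pseudos.  */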
08a9dd06 3804
dab171c5 3805/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3806 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3807static void
3808ior_hard_reg_set (set1, set2)
3809 HARD_REG_SET *set1, *set2;
3810{
3811 IOR_HARD_REG_SET (*set1, *set2);
3812}
3813
3814 /* After find_reload_regs has been run for all insns that need reloads,
3815 and/or spill_hard_reg was called, this function is used to actually
3816 spill pseudo registers and try to reallocate them. It also sets up the
3817 spill_regs array for use by choose_reload_regs. */
c8ad158d 3818
dab171c5 3819static int
3820finish_spills (global, dumpfile)
3821 int global;
3822 FILE *dumpfile;
3823{
3824 struct insn_chain *chain;
3825 int something_changed = 0;
3826 int i;
3827
3828 /* Build the spill_regs array for the function. */
3829 /* If there are some registers still to eliminate and one of the spill regs
3830 wasn't ever used before, additional stack space may have to be
3831 allocated to store this register. Thus, we may have changed the offset
3832 between the stack and frame pointers, so mark that something has changed.
08a9dd06 3833
dab171c5 3834 One might think that we need only set VAL to 1 if this is a call-used
3835 register. However, the set of registers that must be saved by the
3836 prologue is not identical to the call-used set. For example, the
3837 register used by the call insn for the return PC is a call-used register,
3838 but must be saved by the prologue. */
3839
3840 n_spills = 0;
3841 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3842 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3843 {
3844 spill_reg_order[i] = n_spills;
3845 spill_regs[n_spills++] = i;
3846 if (num_eliminable && ! regs_ever_live[i])
3847 something_changed = 1;
3848 regs_ever_live[i] = 1;
3849 }
3850 else
3851 spill_reg_order[i] = -1;
3852
3853 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3854 if (REGNO_REG_SET_P (spilled_pseudos, i))
3855 {
3856 /* Record the current hard register the pseudo is allocated to in
3857 pseudo_previous_regs so we avoid reallocating it to the same
3858 hard reg in a later pass. */
3859 if (reg_renumber[i] < 0)
3860 abort ();
3861 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
08a9dd06 3862 /* Mark it as no longer having a hard register home. */
3863 reg_renumber[i] = -1;
3864 /* We will need to scan everything again. */
3865 something_changed = 1;
dab171c5 3866 }
590ec786 3867
dab171c5 3868 /* Retry global register allocation if possible. */
3869 if (global)
3870 {
3871 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
3872 /* For every insn that needs reloads, set the registers used as spill
3873 regs in pseudo_forbidden_regs for every pseudo live across the
3874 insn. */
3875 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3876 {
3877 EXECUTE_IF_SET_IN_REG_SET
3878 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
3879 {
3880 ior_hard_reg_set (pseudo_forbidden_regs + i,
3881 &chain->used_spill_regs);
3882 });
3883 EXECUTE_IF_SET_IN_REG_SET
3884 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
3885 {
3886 ior_hard_reg_set (pseudo_forbidden_regs + i,
3887 &chain->used_spill_regs);
3888 });
3889 }
590ec786 3890
dab171c5 3891 /* Retry allocating the spilled pseudos. For each reg, merge the
3892 various reg sets that indicate which hard regs can't be used,
3893 and call retry_global_alloc.
3894 We change spilled_pseudos here to only contain pseudos that did not
3895 get a new hard register. */
3896 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3897 if (reg_old_renumber[i] != reg_renumber[i])
08a9dd06 3898 {
dab171c5 3899 HARD_REG_SET forbidden;
3900 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3901 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3902 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3903 retry_global_alloc (i, forbidden);
3904 if (reg_renumber[i] >= 0)
3905 CLEAR_REGNO_REG_SET (spilled_pseudos, i);
08a9dd06 3906 }
dab171c5 3907 }
590ec786 3908
dab171c5 3909 /* Fix up the register information in the insn chain.
3910 This involves deleting those of the spilled pseudos which did not get
3911 a new hard register home from the live_{before,after} sets. */
590ec786 3912 for (chain = reload_insn_chain; chain; chain = chain->next)
3913 {
dab171c5 3914 HARD_REG_SET used_by_pseudos;
3915 HARD_REG_SET used_by_pseudos2;
3916
590ec786 3917 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
3918 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
dab171c5 3919
3920 /* Mark any unallocated hard regs as available for spills. That
3921 makes inheritance work somewhat better. */
3922 if (chain->need_reload)
3923 {
3924 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
3925 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
3926 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3927
3928 /* Save the old value for the sanity test below. */
3929 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3930
3931 compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
3932 compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
3933 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3934 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3935
3936 /* Make sure we only enlarge the set. */
3937 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3938 abort ();
3939 ok:;
3940 }
590ec786 3941 }
dab171c5 3942
3943 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3945 {
3946 int regno = reg_renumber[i];
3947 if (reg_old_renumber[i] == regno)
3948 continue;
3949
3950 alter_reg (i, reg_old_renumber[i]);
3951 reg_old_renumber[i] = regno;
3952 if (dumpfile)
3953 {
3954 if (regno == -1)
3955 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3956 else
3957 fprintf (dumpfile, " Register %d now in %d.\n\n",
3958 i, reg_renumber[i]);
3959 }
3960 }
3961
3962 return something_changed;
590ec786 3963}
08a9dd06 3964\f
e715e2a3 3965/* Find all paradoxical subregs within X and update reg_max_ref_width.
3966 Also mark any hard registers used to store user variables as
3967 forbidden from being used for spill registers. */
08a9dd06 3968
3969static void
3970scan_paradoxical_subregs (x)
3971 register rtx x;
3972{
3973 register int i;
3974 register char *fmt;
3975 register enum rtx_code code = GET_CODE (x);
3976
3977 switch (code)
3978 {
e715e2a3 3979 case REG:
dab171c5 3980#if 0
0dbd1c74 3981 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
2fcd559a 3982 && REG_USERVAR_P (x))
dab171c5 3983 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3984#endif
e715e2a3 3985 return;
3986
08a9dd06 3987 case CONST_INT:
3988 case CONST:
3989 case SYMBOL_REF:
3990 case LABEL_REF:
3991 case CONST_DOUBLE:
3992 case CC0:
3993 case PC:
08a9dd06 3994 case USE:
3995 case CLOBBER:
3996 return;
3997
3998 case SUBREG:
3999 if (GET_CODE (SUBREG_REG (x)) == REG
4000 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4001 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4002 = GET_MODE_SIZE (GET_MODE (x));
4003 return;
0dbd1c74 4004
4005 default:
4006 break;
08a9dd06 4007 }
4008
4009 fmt = GET_RTX_FORMAT (code);
4010 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4011 {
4012 if (fmt[i] == 'e')
4013 scan_paradoxical_subregs (XEXP (x, i));
4014 else if (fmt[i] == 'E')
4015 {
4016 register int j;
4017 for (j = XVECLEN (x, i) - 1; j >=0; j--)
4018 scan_paradoxical_subregs (XVECEXP (x, i, j));
4019 }
4020 }
4021}
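/* Example of a paradoxical SUBREG (hypothetical modes): on a target where
   SImode is 4 bytes and DImode is 8 bytes, the reference

       (subreg:DI (reg:SI N) 0)

   is wider than the register it contains, so the code above records 8 in
   reg_max_ref_width[N].  Non-paradoxical subregs are left alone.  */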
4022\f
08a9dd06 4023static int
04b08097 4024hard_reg_use_compare (p1p, p2p)
dab171c5 4025 const GENERIC_PTR p1p;
4026 const GENERIC_PTR p2p;
4027{
4028 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p;
4029 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p;
4030 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
4031 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
4032 if (bad1 && bad2)
4033 return p1->regno - p2->regno;
4034 if (bad1)
4035 return 1;
4036 if (bad2)
4037 return -1;
4038 if (p1->uses > p2->uses)
4039 return 1;
4040 if (p1->uses < p2->uses)
4041 return -1;
08a9dd06 4042 /* If regs are equally good, sort by regno,
4043 so that the results of qsort leave nothing to chance. */
4044 return p1->regno - p2->regno;
4045}
4046
dab171c5 4047/* Used for communication between order_regs_for_reload and count_pseudo.
4048 Used to avoid counting one pseudo twice. */
4049static regset pseudos_counted;
4050
4051/* Update the costs in N_USES, considering that pseudo REG is live. */
4052static void
4053count_pseudo (n_uses, reg)
4054 struct hard_reg_n_uses *n_uses;
4055 int reg;
4056{
4057 int r = reg_renumber[reg];
4058 int nregs;
4059
4060 if (REGNO_REG_SET_P (pseudos_counted, reg))
4061 return;
4062 SET_REGNO_REG_SET (pseudos_counted, reg);
4063
4064 if (r < 0)
4065 abort ();
4066
4067 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
4068 while (nregs-- > 0)
4069 n_uses[r++].uses += REG_N_REFS (reg);
4070}
08a9dd06 4071/* Choose the order to consider regs for use as reload registers
4072 based on how much trouble would be caused by spilling one.
4073 Store them in order of decreasing preference in potential_reload_regs. */
4074
4075static void
dab171c5 4076order_regs_for_reload (chain)
4077 struct insn_chain *chain;
08a9dd06 4078{
dab171c5 4079 register int i;
08a9dd06 4080 register int o = 0;
08a9dd06 4081 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
4082
dab171c5 4083 pseudos_counted = ALLOCA_REG_SET ();
08a9dd06 4084
dab171c5 4085 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
08a9dd06 4086
4087 /* Count number of uses of each hard reg by pseudo regs allocated to it
4088 and then order them by decreasing use. */
4089
4090 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4091 {
dab171c5 4092 int j;
4093
08a9dd06 4094 hard_reg_n_uses[i].regno = i;
dab171c5 4095 hard_reg_n_uses[i].uses = 0;
08a9dd06 4096
dab171c5 4097 /* Test the various reasons why we can't use a register for
4098 spilling in this insn. */
4099 if (fixed_regs[i]
4100 || REGNO_REG_SET_P (chain->live_before, i)
4101 || REGNO_REG_SET_P (chain->live_after, i))
08a9dd06 4102 {
08a9dd06 4103 SET_HARD_REG_BIT (bad_spill_regs, i);
dab171c5 4104 continue;
08a9dd06 4105 }
08a9dd06 4106
dab171c5 4107 /* Now find out which pseudos are allocated to it, and update
4108 hard_reg_n_uses. */
4109 CLEAR_REG_SET (pseudos_counted);
4110
4111 EXECUTE_IF_SET_IN_REG_SET
4112 (chain->live_before, FIRST_PSEUDO_REGISTER, j,
4113 {
4114 count_pseudo (hard_reg_n_uses, j);
4115 });
4116 EXECUTE_IF_SET_IN_REG_SET
4117 (chain->live_after, FIRST_PSEUDO_REGISTER, j,
4118 {
4119 count_pseudo (hard_reg_n_uses, j);
4120 });
08a9dd06 4121 }
dab171c5 4122
4123 FREE_REG_SET (pseudos_counted);
08a9dd06 4124
4125 /* Prefer registers not so far used, for use in temporary loading.
4126 Among them, if REG_ALLOC_ORDER is defined, use that order.
4127 Otherwise, prefer registers not preserved by calls. */
4128
4129#ifdef REG_ALLOC_ORDER
4130 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4131 {
4132 int regno = reg_alloc_order[i];
4133
dab171c5 4134 if (hard_reg_n_uses[regno].uses == 0
4135 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
08a9dd06 4136 potential_reload_regs[o++] = regno;
4137 }
4138#else
4139 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4140 {
dab171c5 4141 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
4142 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
08a9dd06 4143 potential_reload_regs[o++] = i;
4144 }
4145 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4146 {
dab171c5 4147 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
4148 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
08a9dd06 4149 potential_reload_regs[o++] = i;
4150 }
4151#endif
4152
4153 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4154 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4155
4156 /* Now add the regs that are already used,
4157 preferring those used less often. The fixed and otherwise forbidden
4158 registers will be at the end of this list. */
4159
4160 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
dab171c5 4161 if (hard_reg_n_uses[i].uses != 0
4162 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4163 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4165 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
08a9dd06 4166 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4167}
4168\f
4169/* Reload pseudo-registers into hard regs around each insn as needed.
4170 Additional register load insns are output before the insn that needs it
4171 and perhaps store insns after insns that modify the reloaded pseudo reg.
4172
4173 reg_last_reload_reg and reg_reloaded_contents keep track of
a6f0d869 4174 which registers are already available in reload registers.
08a9dd06 4175 We update these for the reloads that we perform,
4176 as the insns are scanned. */
4177
4178static void
590ec786 4179reload_as_needed (live_known)
08a9dd06 4180 int live_known;
4181{
590ec786 4182 struct insn_chain *chain;
3c1d7436 4183#if defined (AUTO_INC_DEC) || defined (INSN_CLOBBERS_REGNO_P)
08a9dd06 4184 register int i;
3c1d7436 4185#endif
08a9dd06 4186 rtx x;
08a9dd06 4187
748e6d74 4188 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4189 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
08a9dd06 4190 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
748e6d74 4191 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
08a9dd06 4192 reg_has_output_reload = (char *) alloca (max_regno);
a5e95c30 4193 CLEAR_HARD_REG_SET (reg_reloaded_valid);
08a9dd06 4194
f87f6d5d 4195 set_initial_elim_offsets ();
08a9dd06 4196
590ec786 4197 for (chain = reload_insn_chain; chain; chain = chain->next)
08a9dd06 4198 {
dab171c5 4199 rtx prev;
590ec786 4200 rtx insn = chain->insn;
4201 rtx old_next = NEXT_INSN (insn);
08a9dd06 4202
4203 /* If we pass a label, copy the offsets from the label information
4204 into the current offsets of each elimination. */
4205 if (GET_CODE (insn) == CODE_LABEL)
f87f6d5d 4206 set_offsets_for_label (insn);
08a9dd06 4207
4208 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4209 {
5f4f8eb2 4210 rtx oldpat = PATTERN (insn);
08a9dd06 4211
a22e7476 4212 /* If this is a USE or CLOBBER of a MEM, ensure that any
4213 references to eliminable registers have been removed. */
4214
4215 if ((GET_CODE (PATTERN (insn)) == USE
4216 || GET_CODE (PATTERN (insn)) == CLOBBER)
4217 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4218 XEXP (XEXP (PATTERN (insn), 0), 0)
4219 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
e87a9401 4220 GET_MODE (XEXP (PATTERN (insn), 0)),
6182a80f 4221 NULL_RTX);
a22e7476 4222
08a9dd06 4223 /* If we need to do register elimination processing, do so.
4224 This might delete the insn, in which case we are done. */
aa8d28af 4225 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
08a9dd06 4226 {
4227 eliminate_regs_in_insn (insn, 1);
4228 if (GET_CODE (insn) == NOTE)
93c7b06e 4229 {
4230 update_eliminable_offsets ();
4231 continue;
4232 }
08a9dd06 4233 }
4234
590ec786 4235 /* If need_elim is nonzero but need_reload is zero, one might think
4236 that we could simply set n_reloads to 0. However, find_reloads
4237 could have done some manipulation of the insn (such as swapping
4238 commutative operands), and these manipulations are lost during
4239 the first pass for every insn that needs register elimination.
4240 So the actions of find_reloads must be redone here. */
4241
dab171c5 4242 if (! chain->need_elim && ! chain->need_reload
4243 && ! chain->need_operand_change)
08a9dd06 4244 n_reloads = 0;
4245 /* First find the pseudo regs that must be reloaded for this insn.
4246 This info is returned in the tables reload_... (see reload.h).
4247 Also modify the body of INSN by substituting RELOAD
4248 rtx's for those pseudo regs. */
4249 else
4250 {
4251 bzero (reg_has_output_reload, max_regno);
4252 CLEAR_HARD_REG_SET (reg_is_output_reload);
4253
4254 find_reloads (insn, 1, spill_indirect_levels, live_known,
4255 spill_reg_order);
4256 }
4257
c2496df6 4258 if (num_eliminable && chain->need_elim)
93c7b06e 4259 update_eliminable_offsets ();
4260
08a9dd06 4261 if (n_reloads > 0)
4262 {
93c7b06e 4263 rtx next = NEXT_INSN (insn);
bb552490 4264 rtx p;
08a9dd06 4265
93c7b06e 4266 prev = PREV_INSN (insn);
4267
08a9dd06 4268 /* Now compute which reload regs to reload them into. Perhaps
4269 reusing reload regs from previous insns, or else output
4270 load insns to reload them. Maybe output store insns too.
4271 Record the choices of reload reg in reload_reg_rtx. */
dab171c5 4272 choose_reload_regs (chain);
08a9dd06 4273
81d0fbb3 4274 /* Merge any reloads that we didn't combine for fear of
4275 increasing the number of spill registers needed but now
4276 discover can be safely merged. */
2fcd559a 4277 if (SMALL_REGISTER_CLASSES)
4278 merge_assigned_reloads (insn);
81d0fbb3 4279
08a9dd06 4280 /* Generate the insns to reload operands into or out of
4281 their reload regs. */
590ec786 4282 emit_reload_insns (chain);
08a9dd06 4283
4284 /* Substitute the chosen reload regs from reload_reg_rtx
4285 into the insn's body (or perhaps into the bodies of other
4286 load and store insns that we just made for reloading
4287 and that we moved the structure into). */
4288 subst_reloads ();
bb552490 4289
4290 /* If this was an ASM, make sure that all the reload insns
4291 we have generated are valid. If not, give an error
4292 and delete them. */
4293
4294 if (asm_noperands (PATTERN (insn)) >= 0)
4295 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4296 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4297 && (recog_memoized (p) < 0
7f82be90 4298 || (extract_insn (p), ! constrain_operands (1))))
bb552490 4299 {
4300 error_for_asm (insn,
4301 "`asm' operand requires impossible reload");
4302 PUT_CODE (p, NOTE);
4303 NOTE_SOURCE_FILE (p) = 0;
4304 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4305 }
08a9dd06 4306 }
4307 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4308 is no longer validly lying around to save a future reload.
4309 Note that this does not detect pseudos that were reloaded
4310 for this insn in order to be stored into
4311 (obeying register constraints). That is correct; such reload
4312 registers ARE still valid. */
5f4f8eb2 4313 note_stores (oldpat, forget_old_reloads_1);
08a9dd06 4314
4315 /* There may have been CLOBBER insns placed after INSN. So scan
4316 between INSN and NEXT and use them to forget old reloads. */
590ec786 4317 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
08a9dd06 4318 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4319 note_stores (PATTERN (x), forget_old_reloads_1);
4320
4321#ifdef AUTO_INC_DEC
93c7b06e 4322 /* Likewise for regs altered by auto-increment in this insn.
4323 REG_INC notes have been changed by reloading:
4324 find_reloads_address_1 records substitutions for them,
4325 which have been performed by subst_reloads above. */
4326 for (i = n_reloads - 1; i >= 0; i--)
4327 {
4328 rtx in_reg = reload_in_reg[i];
4329 if (in_reg)
4330 {
4331 enum rtx_code code = GET_CODE (in_reg);
4332 /* PRE_INC / PRE_DEC will have the reload register ending up
4333 with the same value as the stack slot, but that doesn't
4334 hold true for POST_INC / POST_DEC. Either we have to
4335 convert the memory access to a true POST_INC / POST_DEC,
4336 or we can't use the reload register for inheritance. */
4337 if ((code == POST_INC || code == POST_DEC)
4338 && TEST_HARD_REG_BIT (reg_reloaded_valid,
c13446db 4339 REGNO (reload_reg_rtx[i]))
4340 /* Make sure it is the inc/dec pseudo, and not
4341 some other (e.g. output operand) pseudo. */
4342 && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
4343 == REGNO (XEXP (in_reg, 0))))
4344
93c7b06e 4345 {
4346 rtx reload_reg = reload_reg_rtx[i];
4347 enum machine_mode mode = GET_MODE (reload_reg);
4348 int n = 0;
4349 rtx p;
4350
4351 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4352 {
4353 /* We really want to ignore REG_INC notes here, so
4354 use PATTERN (p) as argument to reg_set_p. */
4355 if (reg_set_p (reload_reg, PATTERN (p)))
4356 break;
4357 n = count_occurrences (PATTERN (p), reload_reg);
4358 if (! n)
4359 continue;
4360 if (n == 1)
420d3340 4361 {
4362 n = validate_replace_rtx (reload_reg,
4363 gen_rtx (code, mode,
4364 reload_reg),
4365 p);
4366
4367 /* We must also verify that the constraints
4368 are met after the replacement. */
4369 extract_insn (p);
4370 if (n)
4371 n = constrain_operands (1);
4372 else
4373 break;
4374
4375 /* If the constraints were not met, then
4376 undo the replacement. */
4377 if (!n)
4378 {
4379 validate_replace_rtx (gen_rtx (code, mode,
4380 reload_reg),
4381 reload_reg, p);
4382 break;
4383 }
4384
4385 }
93c7b06e 4386 break;
4387 }
4388 if (n == 1)
4389 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4390 REG_NOTES (p));
4391 else
4392 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
4393 }
4394 }
4395 }
4396#if 0 /* ??? Is this code obsolete now? Need to check carefully. */
08a9dd06 4397 /* Likewise for regs altered by auto-increment in this insn.
4398 But note that the reg-notes are not changed by reloading:
4399 they still contain the pseudo-regs, not the spill regs. */
4400 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4401 if (REG_NOTE_KIND (x) == REG_INC)
4402 {
4403 /* See if this pseudo reg was reloaded in this insn.
4404 If so, its last-reload info is still valid
4405 because it is based on this insn's reload. */
4406 for (i = 0; i < n_reloads; i++)
4407 if (reload_out[i] == XEXP (x, 0))
4408 break;
4409
ff25496d 4410 if (i == n_reloads)
b990dc72 4411 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
08a9dd06 4412 }
93c7b06e 4413#endif
08a9dd06 4414#endif
4415 }
4416 /* A reload reg's contents are unknown after a label. */
4417 if (GET_CODE (insn) == CODE_LABEL)
a5e95c30 4418 CLEAR_HARD_REG_SET (reg_reloaded_valid);
08a9dd06 4419
4420 /* Don't assume a reload reg is still good after a call insn
4421 if it is a call-used reg. */
81d0fbb3 4422 else if (GET_CODE (insn) == CALL_INSN)
a5e95c30 4423 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
08a9dd06 4424
4425 /* In case registers overlap, allow certain insns to invalidate
4426 particular hard registers. */
4427
4428#ifdef INSN_CLOBBERS_REGNO_P
a5e95c30 4429 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4430 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4431 && INSN_CLOBBERS_REGNO_P (insn, i))
4432 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
08a9dd06 4433#endif
4434
08a9dd06 4435#ifdef USE_C_ALLOCA
4436 alloca (0);
4437#endif
4438 }
4439}
4440
4441/* Discard all record of any value reloaded from X,
4442 or reloaded in X from someplace else;
4443 unless X is an output reload reg of the current insn.
4444
4445 X may be a hard reg (the reload reg)
4446 or it may be a pseudo reg that was reloaded from. */
4447
4448static void
b990dc72 4449forget_old_reloads_1 (x, ignored)
08a9dd06 4450 rtx x;
0e93a6ac 4451 rtx ignored ATTRIBUTE_UNUSED;
08a9dd06 4452{
4453 register int regno;
4454 int nr;
9cc47c53 4455 int offset = 0;
4456
4457 /* note_stores does give us subregs of hard regs. */
4458 while (GET_CODE (x) == SUBREG)
4459 {
4460 offset += SUBREG_WORD (x);
4461 x = SUBREG_REG (x);
4462 }
08a9dd06 4463
4464 if (GET_CODE (x) != REG)
4465 return;
4466
9cc47c53 4467 regno = REGNO (x) + offset;
08a9dd06 4468
4469 if (regno >= FIRST_PSEUDO_REGISTER)
4470 nr = 1;
4471 else
4472 {
4473 int i;
4474 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4475 /* Storing into a spilled-reg invalidates its contents.
4476 This can happen if a block-local pseudo is allocated to that reg
4477 and it wasn't spilled because this block's total need is 0.
4478 Then some insn might have an optional reload and use this reg. */
4479 for (i = 0; i < nr; i++)
a5e95c30 4480 /* But don't do this if the reg actually serves as an output
4481 reload reg in the current instruction. */
4482 if (n_reloads == 0
4483 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4484 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
08a9dd06 4485 }
4486
4487 /* Since value of X has changed,
4488 forget any value previously copied from it. */
4489
4490 while (nr-- > 0)
4491 /* But don't forget a copy if this is the output reload
4492 that establishes the copy's validity. */
4493 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4494 reg_last_reload_reg[regno + nr] = 0;
4495}
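/* Example (hypothetical numbers): if hard register 3 holds an inheritable
   copy left over from an earlier reload and the current insn stores into
   register 3 without register 3 serving as an output reload here, the loops
   above clear bit 3 of reg_reloaded_valid and zero reg_last_reload_reg[3],
   so later insns will not try to inherit the stale copy.  */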
4496\f
4497/* For each reload, the mode of the reload register. */
4498static enum machine_mode reload_mode[MAX_RELOADS];
4499
4500/* For each reload, the largest number of registers it will require. */
4501static int reload_nregs[MAX_RELOADS];
4502
4503/* Comparison function for qsort to decide which of two reloads
4504 should be handled first. *P1 and *P2 are the reload numbers. */
4505
4506static int
04b08097 4507reload_reg_class_lower (r1p, r2p)
4508 const GENERIC_PTR r1p;
4509 const GENERIC_PTR r2p;
08a9dd06 4510{
04b08097 4511 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
08a9dd06 4512 register int t;
c8ad158d 4513
08a9dd06 4514 /* Consider required reloads before optional ones. */
4515 t = reload_optional[r1] - reload_optional[r2];
4516 if (t != 0)
4517 return t;
4518
4519 /* Count all solitary classes before non-solitary ones. */
4520 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4521 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4522 if (t != 0)
4523 return t;
4524
4525 /* Aside from solitaires, consider all multi-reg groups first. */
4526 t = reload_nregs[r2] - reload_nregs[r1];
4527 if (t != 0)
4528 return t;
4529
4530 /* Consider reloads in order of increasing reg-class number. */
4531 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4532 if (t != 0)
4533 return t;
4534
4535 /* If reloads are equally urgent, sort by reload number,
4536 so that the results of qsort leave nothing to chance. */
4537 return r1 - r2;
4538}
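/* This comparison function is intended for qsort over an array of reload
   numbers.  A typical use (kept out of compilation; the function name is
   hypothetical) looks like this:  */
#if 0
static void
sort_reloads_sketch ()
{
  short reload_order[MAX_RELOADS];
  int i;

  for (i = 0; i < n_reloads; i++)
    reload_order[i] = i;

  qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
}
#endif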
4539\f
4540/* The following HARD_REG_SETs indicate when each hard register is
4541 used for a reload of various parts of the current insn. */
4542
4543/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4544static HARD_REG_SET reload_reg_used;
81d0fbb3 4545/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4546static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
9e53d777 4547/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4548static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
81d0fbb3 4549/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4550static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
9e53d777 4551/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4552static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
81d0fbb3 4553/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4554static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4555/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4556static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
08a9dd06 4557/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4558static HARD_REG_SET reload_reg_used_in_op_addr;
58866c2c 4559/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4560static HARD_REG_SET reload_reg_used_in_op_addr_reload;
81d0fbb3 4561/* If reg is in use for a RELOAD_FOR_INSN reload. */
4562static HARD_REG_SET reload_reg_used_in_insn;
4563/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4564static HARD_REG_SET reload_reg_used_in_other_addr;
08a9dd06 4565
4566/* If reg is in use as a reload reg for any sort of reload. */
4567static HARD_REG_SET reload_reg_used_at_all;
4568
7ac01373 4569/* If reg is use as an inherited reload. We just mark the first register
4570 in the group. */
4571static HARD_REG_SET reload_reg_used_for_inherit;
4572
9e519b97 4573/* Records which hard regs are used in any way, either as explicit use or
4574 by being allocated to a pseudo during any point of the current insn. */
4575static HARD_REG_SET reg_used_in_insn;
88ce984a 4576
81d0fbb3 4577/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4578 TYPE. MODE is used to indicate how many consecutive regs are
4579 actually used. */
08a9dd06 4580
4581static void
81d0fbb3 4582mark_reload_reg_in_use (regno, opnum, type, mode)
08a9dd06 4583 int regno;
81d0fbb3 4584 int opnum;
4585 enum reload_type type;
08a9dd06 4586 enum machine_mode mode;
4587{
4588 int nregs = HARD_REGNO_NREGS (regno, mode);
4589 int i;
4590
4591 for (i = regno; i < nregs + regno; i++)
4592 {
81d0fbb3 4593 switch (type)
08a9dd06 4594 {
4595 case RELOAD_OTHER:
4596 SET_HARD_REG_BIT (reload_reg_used, i);
4597 break;
4598
81d0fbb3 4599 case RELOAD_FOR_INPUT_ADDRESS:
4600 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
08a9dd06 4601 break;
4602
9e53d777 4603 case RELOAD_FOR_INPADDR_ADDRESS:
4604 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4605 break;
4606
81d0fbb3 4607 case RELOAD_FOR_OUTPUT_ADDRESS:
4608 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
08a9dd06 4609 break;
4610
9e53d777 4611 case RELOAD_FOR_OUTADDR_ADDRESS:
4612 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4613 break;
4614
08a9dd06 4615 case RELOAD_FOR_OPERAND_ADDRESS:
4616 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4617 break;
4618
58866c2c 4619 case RELOAD_FOR_OPADDR_ADDR:
4620 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4621 break;
4622
81d0fbb3 4623 case RELOAD_FOR_OTHER_ADDRESS:
4624 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4625 break;
4626
08a9dd06 4627 case RELOAD_FOR_INPUT:
81d0fbb3 4628 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
08a9dd06 4629 break;
4630
4631 case RELOAD_FOR_OUTPUT:
81d0fbb3 4632 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4633 break;
4634
4635 case RELOAD_FOR_INSN:
4636 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
08a9dd06 4637 break;
4638 }
4639
4640 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4641 }
4642}
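
/* A typical call, as made from choose_reload_regs further down, is

     mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
                             reload_when_needed[r], reload_mode[r]);

   which marks every hard reg covered by reload R's mode, starting at
   spill_regs[i], both in the per-type set and in reload_reg_used_at_all. */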
4643
7ac01373 4644/* Similarly, but show REGNO is no longer in use for a reload. */
4645
4646static void
4647clear_reload_reg_in_use (regno, opnum, type, mode)
4648 int regno;
4649 int opnum;
4650 enum reload_type type;
4651 enum machine_mode mode;
4652{
4653 int nregs = HARD_REGNO_NREGS (regno, mode);
93c7b06e 4654 int start_regno, end_regno;
7ac01373 4655 int i;
93c7b06e 4656 /* A complication is that for some reload types, inheritance might
4657 allow multiple reloads of the same types to share a reload register.
4658 We set check_opnum if we have to check only reloads with the same
4659 operand number, and check_any if we have to check all reloads. */
4660 int check_opnum = 0;
4661 int check_any = 0;
4662 HARD_REG_SET *used_in_set;
7ac01373 4663
93c7b06e 4664 switch (type)
7ac01373 4665 {
93c7b06e 4666 case RELOAD_OTHER:
4667 used_in_set = &reload_reg_used;
4668 break;
7ac01373 4669
93c7b06e 4670 case RELOAD_FOR_INPUT_ADDRESS:
4671 used_in_set = &reload_reg_used_in_input_addr[opnum];
4672 break;
7ac01373 4673
93c7b06e 4674 case RELOAD_FOR_INPADDR_ADDRESS:
4675 check_opnum = 1;
4676 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4677 break;
9e53d777 4678
93c7b06e 4679 case RELOAD_FOR_OUTPUT_ADDRESS:
4680 used_in_set = &reload_reg_used_in_output_addr[opnum];
4681 break;
7ac01373 4682
93c7b06e 4683 case RELOAD_FOR_OUTADDR_ADDRESS:
4684 check_opnum = 1;
4685 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4686 break;
9e53d777 4687
93c7b06e 4688 case RELOAD_FOR_OPERAND_ADDRESS:
4689 used_in_set = &reload_reg_used_in_op_addr;
4690 break;
7ac01373 4691
93c7b06e 4692 case RELOAD_FOR_OPADDR_ADDR:
4693 check_any = 1;
4694 used_in_set = &reload_reg_used_in_op_addr_reload;
4695 break;
58866c2c 4696
93c7b06e 4697 case RELOAD_FOR_OTHER_ADDRESS:
4698 used_in_set = &reload_reg_used_in_other_addr;
4699 check_any = 1;
4700 break;
7ac01373 4701
93c7b06e 4702 case RELOAD_FOR_INPUT:
4703 used_in_set = &reload_reg_used_in_input[opnum];
4704 break;
7ac01373 4705
93c7b06e 4706 case RELOAD_FOR_OUTPUT:
4707 used_in_set = &reload_reg_used_in_output[opnum];
4708 break;
7ac01373 4709
93c7b06e 4710 case RELOAD_FOR_INSN:
4711 used_in_set = &reload_reg_used_in_insn;
4712 break;
4713 default:
4714 abort ();
4715 }
4716 /* We resolve conflicts with remaining reloads of the same type by
4717 excluding the intervals of reload registers used by them from the
4718 interval of freed reload registers. Since we only keep track of
4719 one set of interval bounds, we might have to exclude somewhat
4720 more than would be necessary if we used a HARD_REG_SET here.
4721 But this should only happen very infrequently, so there should
4722 be no reason to worry about it. */
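  /* For example (illustrative numbers only): if we are freeing hard regs
     4..7 and another reload of the same TYPE still holds regs 6..7, the
     code below shrinks the freed interval to regs 4..5; if that other
     reload held regs 3..4 instead, the interval would shrink to regs
     5..7.  */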
4723
4724 start_regno = regno;
4725 end_regno = regno + nregs;
4726 if (check_opnum || check_any)
4727 {
4728 for (i = n_reloads - 1; i >= 0; i--)
4729 {
4730 if (reload_when_needed[i] == type
4731 && (check_any || reload_opnum[i] == opnum)
4732 && reload_reg_rtx[i])
4733 {
4734 int conflict_start = true_regnum (reload_reg_rtx[i]);
4735 int conflict_end
4736 = (conflict_start
4737 + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));
4738
4739 /* If there is an overlap with the first to-be-freed register,
4740 adjust the interval start. */
4741 if (conflict_start <= start_regno && conflict_end > start_regno)
4742 start_regno = conflict_end;
4743 /* Otherwise, if there is a conflict with one of the other
4744 to-be-freed registers, adjust the interval end. */
4745 if (conflict_start > start_regno && conflict_start < end_regno)
4746 end_regno = conflict_start;
4747 }
7ac01373 4748 }
4749 }
93c7b06e 4750 for (i = start_regno; i < end_regno; i++)
4751 CLEAR_HARD_REG_BIT (*used_in_set, i);
7ac01373 4752}
4753
08a9dd06 4754/* 1 if reg REGNO is free as a reload reg for a reload of the sort
81d0fbb3 4755 specified by OPNUM and TYPE. */
08a9dd06 4756
4757static int
81d0fbb3 4758reload_reg_free_p (regno, opnum, type)
08a9dd06 4759 int regno;
81d0fbb3 4760 int opnum;
4761 enum reload_type type;
08a9dd06 4762{
81d0fbb3 4763 int i;
4764
8fa27ffb 4765 /* In use for a RELOAD_OTHER means it's not available for anything. */
4766 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
08a9dd06 4767 return 0;
81d0fbb3 4768
4769 switch (type)
08a9dd06 4770 {
4771 case RELOAD_OTHER:
8fa27ffb 4772 /* In use for anything means we can't use it for RELOAD_OTHER. */
4773 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
563fb44c 4774 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4775 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4776 return 0;
4777
4778 for (i = 0; i < reload_n_operands; i++)
4779 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
9e53d777 4780 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
563fb44c 4781 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
9e53d777 4782 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
563fb44c 4783 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4784 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4785 return 0;
4786
4787 return 1;
08a9dd06 4788
08a9dd06 4789 case RELOAD_FOR_INPUT:
81d0fbb3 4790 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4791 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4792 return 0;
4793
58866c2c 4794 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4795 return 0;
4796
81d0fbb3 4797 /* If it is used for some other input, can't use it. */
4798 for (i = 0; i < reload_n_operands; i++)
4799 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4800 return 0;
4801
4802 /* If it is used in a later operand's address, can't use it. */
4803 for (i = opnum + 1; i < reload_n_operands; i++)
9e53d777 4804 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4805 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
81d0fbb3 4806 return 0;
4807
4808 return 1;
4809
4810 case RELOAD_FOR_INPUT_ADDRESS:
4811 /* Can't use a register if it is used for an input address for this
4812 operand or used as an input in an earlier one. */
9e53d777 4813 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4814 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4815 return 0;
4816
4817 for (i = 0; i < opnum; i++)
4818 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4819 return 0;
4820
4821 return 1;
4822
4823 case RELOAD_FOR_INPADDR_ADDRESS:
4824 /* Can't use a register if it is used for an input address
3398e91d 4825 for this operand or used as an input in an earlier
9e53d777 4826 one. */
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
81d0fbb3 4828 return 0;
4829
4830 for (i = 0; i < opnum; i++)
4831 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4832 return 0;
4833
4834 return 1;
4835
4836 case RELOAD_FOR_OUTPUT_ADDRESS:
4837 /* Can't use a register if it is used for an output address for this
4838 operand or used as an output in this or a later operand. */
4839 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4840 return 0;
4841
4842 for (i = opnum; i < reload_n_operands; i++)
4843 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4844 return 0;
4845
4846 return 1;
4847
9e53d777 4848 case RELOAD_FOR_OUTADDR_ADDRESS:
4849 /* Can't use a register if it is used for an output address
3398e91d 4850 for this operand or used as an output in this or a
9e53d777 4851 later operand. */
4852 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4853 return 0;
4854
4855 for (i = opnum; i < reload_n_operands; i++)
4856 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4857 return 0;
4858
4859 return 1;
4860
08a9dd06 4861 case RELOAD_FOR_OPERAND_ADDRESS:
81d0fbb3 4862 for (i = 0; i < reload_n_operands; i++)
4863 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4864 return 0;
4865
4866 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4867 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4868
58866c2c 4869 case RELOAD_FOR_OPADDR_ADDR:
4870 for (i = 0; i < reload_n_operands; i++)
4871 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4872 return 0;
4873
4429ee8b 4874 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
58866c2c 4875
08a9dd06 4876 case RELOAD_FOR_OUTPUT:
81d0fbb3 4877 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4878 outputs, or an operand address for this or an earlier output. */
4879 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4880 return 0;
4881
4882 for (i = 0; i < reload_n_operands; i++)
4883 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4884 return 0;
4885
4886 for (i = 0; i <= opnum; i++)
9e53d777 4887 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4888 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
81d0fbb3 4889 return 0;
4890
4891 return 1;
4892
4893 case RELOAD_FOR_INSN:
4894 for (i = 0; i < reload_n_operands; i++)
4895 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4896 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4897 return 0;
4898
4899 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4900 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4901
4902 case RELOAD_FOR_OTHER_ADDRESS:
4903 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
08a9dd06 4904 }
4905 abort ();
4906}
4907
08a9dd06 4908/* Return 1 if the value in reload reg REGNO, as used by a reload
81d0fbb3 4909 needed for the part of the insn specified by OPNUM and TYPE,
08a9dd06 4910 is still available in REGNO at the end of the insn.
4911
4912 We can assume that the reload reg was already tested for availability
4913 at the time it is needed, and we should not check this again,
4914 in case the reg has already been marked in use. */
4915
4916static int
81d0fbb3 4917reload_reg_reaches_end_p (regno, opnum, type)
08a9dd06 4918 int regno;
81d0fbb3 4919 int opnum;
4920 enum reload_type type;
08a9dd06 4921{
81d0fbb3 4922 int i;
4923
4924 switch (type)
08a9dd06 4925 {
4926 case RELOAD_OTHER:
4927 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4928 its value must reach the end. */
4929 return 1;
4930
4931 /* If this use is for part of the insn,
81d0fbb3 4932 its value reaches if no subsequent part uses the same register.
4933 Just like the above function, don't try to do this with lots
4934 of fallthroughs. */
4935
4936 case RELOAD_FOR_OTHER_ADDRESS:
4937 /* Here we check for everything else, since these don't conflict
4938 with anything else and everything comes later. */
4939
4940 for (i = 0; i < reload_n_operands; i++)
4941 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
9e53d777 4942 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
81d0fbb3 4943 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4944 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
9e53d777 4945 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
81d0fbb3 4946 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4947 return 0;
4948
4949 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4950 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4951 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4952
4953 case RELOAD_FOR_INPUT_ADDRESS:
9e53d777 4954 case RELOAD_FOR_INPADDR_ADDRESS:
81d0fbb3 4955 /* Similar, except that we check only for this and subsequent inputs
4956 and the address of only subsequent inputs and we do not need
4957 to check for RELOAD_OTHER objects since they are known not to
4958 conflict. */
4959
4960 for (i = opnum; i < reload_n_operands; i++)
4961 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4962 return 0;
4963
4964 for (i = opnum + 1; i < reload_n_operands; i++)
9e53d777 4965 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4966 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
81d0fbb3 4967 return 0;
4968
4969 for (i = 0; i < reload_n_operands; i++)
4970 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
9e53d777 4971 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
81d0fbb3 4972 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4973 return 0;
4974
58866c2c 4975 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4976 return 0;
4977
81d0fbb3 4978 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4979 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4980
08a9dd06 4981 case RELOAD_FOR_INPUT:
81d0fbb3 4982 /* Similar to input address, except we start at the next operand for
4983 both input and input address and we do not check for
4984 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4985 would conflict. */
4986
4987 for (i = opnum + 1; i < reload_n_operands; i++)
4988 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
9e53d777 4989 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
81d0fbb3 4990 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4991 return 0;
4992
a92771b8 4993 /* ... fall through ... */
81d0fbb3 4994
08a9dd06 4995 case RELOAD_FOR_OPERAND_ADDRESS:
81d0fbb3 4996 /* Check outputs and their addresses. */
4997
4998 for (i = 0; i < reload_n_operands; i++)
4999 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
9e53d777 5000 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
81d0fbb3 5001 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5002 return 0;
5003
5004 return 1;
5005
58866c2c 5006 case RELOAD_FOR_OPADDR_ADDR:
5007 for (i = 0; i < reload_n_operands; i++)
5008 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
9e53d777 5009 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
58866c2c 5010 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5011 return 0;
5012
4429ee8b 5013 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5014 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
58866c2c 5015
81d0fbb3 5016 case RELOAD_FOR_INSN:
58866c2c 5017 /* These conflict with other outputs with RELOAD_OTHER. So
81d0fbb3 5018 we need only check for output addresses. */
5019
5020 opnum = -1;
5021
a92771b8 5022 /* ... fall through ... */
81d0fbb3 5023
08a9dd06 5024 case RELOAD_FOR_OUTPUT:
81d0fbb3 5025 case RELOAD_FOR_OUTPUT_ADDRESS:
9e53d777 5026 case RELOAD_FOR_OUTADDR_ADDRESS:
81d0fbb3 5027 /* We already know these can't conflict with a later output. So the
5028 only thing to check are later output addresses. */
5029 for (i = opnum + 1; i < reload_n_operands; i++)
9e53d777 5030 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5031 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
81d0fbb3 5032 return 0;
5033
08a9dd06 5034 return 1;
5035 }
81d0fbb3 5036
08a9dd06 5037 abort ();
5038}
5039\f
fff4060f 5040/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5041 Return 0 otherwise.
5042
5043 This function uses the same algorithm as reload_reg_free_p above. */
5044
c5aa1e92 5045int
fff4060f 5046reloads_conflict (r1, r2)
5047 int r1, r2;
5048{
5049 enum reload_type r1_type = reload_when_needed[r1];
5050 enum reload_type r2_type = reload_when_needed[r2];
5051 int r1_opnum = reload_opnum[r1];
5052 int r2_opnum = reload_opnum[r2];
5053
8fa27ffb 5054 /* RELOAD_OTHER conflicts with everything. */
5055 if (r2_type == RELOAD_OTHER)
fff4060f 5056 return 1;
5057
5058 /* Otherwise, check conflicts differently for each type. */
5059
5060 switch (r1_type)
5061 {
5062 case RELOAD_FOR_INPUT:
5063 return (r2_type == RELOAD_FOR_INSN
5064 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
58866c2c 5065 || r2_type == RELOAD_FOR_OPADDR_ADDR
fff4060f 5066 || r2_type == RELOAD_FOR_INPUT
9e53d777 5067 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5068 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5069 && r2_opnum > r1_opnum));
fff4060f 5070
5071 case RELOAD_FOR_INPUT_ADDRESS:
5072 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5073 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5074
9e53d777 5075 case RELOAD_FOR_INPADDR_ADDRESS:
5076 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5077 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5078
fff4060f 5079 case RELOAD_FOR_OUTPUT_ADDRESS:
5080 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5081 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5082
9e53d777 5083 case RELOAD_FOR_OUTADDR_ADDRESS:
5084 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5085 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5086
fff4060f 5087 case RELOAD_FOR_OPERAND_ADDRESS:
5088 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4429ee8b 5089 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
fff4060f 5090
58866c2c 5091 case RELOAD_FOR_OPADDR_ADDR:
5092 return (r2_type == RELOAD_FOR_INPUT
4429ee8b 5093 || r2_type == RELOAD_FOR_OPADDR_ADDR);
58866c2c 5094
fff4060f 5095 case RELOAD_FOR_OUTPUT:
5096 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
9e53d777 5097 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5098 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
fff4060f 5099 && r2_opnum >= r1_opnum));
5100
5101 case RELOAD_FOR_INSN:
5102 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5103 || r2_type == RELOAD_FOR_INSN
5104 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5105
5106 case RELOAD_FOR_OTHER_ADDRESS:
5107 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5108
bd0217a9 5109 case RELOAD_OTHER:
8fa27ffb 5110 return 1;
bd0217a9 5111
fff4060f 5112 default:
5113 abort ();
5114 }
5115}
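
/* For illustration, derived from the cases above rather than adding new
   rules: a RELOAD_FOR_INPUT reload for operand 0 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 1 (a later operand's
   address), but not with a RELOAD_FOR_INPUT_ADDRESS reload for operand 0
   itself; and any two RELOAD_FOR_OTHER_ADDRESS reloads conflict with
   each other.  */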
5116\f
08a9dd06 5117/* Vector of reload-numbers showing the order in which the reloads should
5118 be processed. */
5119short reload_order[MAX_RELOADS];
5120
5121/* Indexed by reload number, 1 if incoming value
5122 inherited from previous insns. */
5123char reload_inherited[MAX_RELOADS];
5124
5125/* For an inherited reload, this is the insn the reload was inherited from,
5126 if we know it. Otherwise, this is 0. */
5127rtx reload_inheritance_insn[MAX_RELOADS];
5128
5129/* If non-zero, this is a place to get the value of the reload,
5130 rather than using reload_in. */
5131rtx reload_override_in[MAX_RELOADS];
5132
a5e95c30 5133/* For each reload, the hard register number of the register used,
5134 or -1 if we did not need a register for this reload. */
08a9dd06 5135int reload_spill_index[MAX_RELOADS];
5136
88148277 5137/* Return 1 if the value in reload reg REGNO, as used by a reload
5138 needed for the part of the insn specified by OPNUM and TYPE,
5139 may be used to load VALUE into it.
2f013d29 5140
5141 Other read-only reloads with the same value do not conflict
5142 unless OUT is non-zero and these other reloads have to live while
5143 output reloads live.
c6bdbe11 5144 If OUT is CONST0_RTX, this is a special case: it means that the
5145 test should not be for using register REGNO as reload register, but
5146 for copying from register REGNO into the reload register.
2f013d29 5147
5148 RELOADNUM is the number of the reload we want to load this value for;
5149 a reload does not conflict with itself.
5150
c6bdbe11 5151 When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
5152 reloads that load an address for the very reload we are considering.
5153
88148277 5154 The caller has to make sure that there is no conflict with the return
5155 register. */
5156static int
c6bdbe11 5157reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
5158 ignore_address_reloads)
88148277 5159 int regno;
5160 int opnum;
5161 enum reload_type type;
2f013d29 5162 rtx value, out;
5163 int reloadnum;
c785719f 5164 int ignore_address_reloads;
88148277 5165{
5166 int time1;
5167 int i;
c6bdbe11 5168 int copy = 0;
5169
5170 if (out == const0_rtx)
5171 {
5172 copy = 1;
5173 out = NULL_RTX;
5174 }
88148277 5175
88148277 5176 /* We use some pseudo 'time' value to check whether the lifetime of the
5177 new register use would overlap with that of a previous reload
5178 that is not read-only or uses a different value.
5179 The 'time' used doesn't have to be linear in any shape or form, just
5180 monotonic.
5181 Some reload types use different 'buckets' for each operand.
5182 So there are MAX_RECOG_OPERANDS different time values for each
cfaa75fa 5183 such reload type.
5184 We compute TIME1 as the time when the register for the prospective
5185 new reload ceases to be live, and TIME2 for each existing
5186 reload as the time when the reload register of that reload
5187 becomes live.
5188 Where there is little to be gained by exact lifetime calculations,
5189 we just make conservative assumptions, i.e. a longer lifetime;
5190 this is done in the 'default:' cases. */
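  /* To make the bucket layout concrete (these numbers merely restate the
     formulas below): for operand 1 and a non-copy reload we get
       RELOAD_FOR_OTHER_ADDRESS      0
       RELOAD_FOR_INPADDR_ADDRESS    1 * 4 + 2 = 6
       RELOAD_FOR_INPUT_ADDRESS      1 * 4 + 3 = 7
       RELOAD_FOR_INPUT              MAX_RECOG_OPERANDS * 4 + 3
       RELOAD_FOR_OPADDR_ADDR        MAX_RECOG_OPERANDS * 4 + 1
       RELOAD_FOR_OUTADDR_ADDRESS    MAX_RECOG_OPERANDS * 4 + 4 + 1
       RELOAD_FOR_OUTPUT_ADDRESS     MAX_RECOG_OPERANDS * 4 + 5 + 1
     so address reloads of later operands always get larger time values
     than those of earlier operands, and whatever must stay live while the
     insn itself executes is pushed past MAX_RECOG_OPERANDS * 4.  */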
88148277 5191 switch (type)
5192 {
5193 case RELOAD_FOR_OTHER_ADDRESS:
5194 time1 = 0;
5195 break;
c6bdbe11 5196 case RELOAD_OTHER:
5197 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5198 break;
88148277 5199 /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5200 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5201 respectively, to the time values for these, we get distinct time
5202 values. To get distinct time values for each operand, we have to
5203 multiply opnum by at least three. We round that up to four because
5204 multiplying by four is often cheaper. */
5205 case RELOAD_FOR_INPADDR_ADDRESS:
c6bdbe11 5206 time1 = opnum * 4 + 2;
88148277 5207 break;
5208 case RELOAD_FOR_INPUT_ADDRESS:
c6bdbe11 5209 time1 = opnum * 4 + 3;
5210 break;
5211 case RELOAD_FOR_INPUT:
5212 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5213 executes (inclusive). */
5214 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
88148277 5215 break;
93c7b06e 5216 case RELOAD_FOR_OPADDR_ADDR:
c6bdbe11 5217 /* opnum * 4 + 4
93c7b06e 5218 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
93c7b06e 5219 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5220 break;
5221 case RELOAD_FOR_OPERAND_ADDRESS:
5222 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5223 is executed. */
c6bdbe11 5224 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5225 break;
5226 case RELOAD_FOR_OUTADDR_ADDRESS:
5227 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
88148277 5228 break;
88148277 5229 case RELOAD_FOR_OUTPUT_ADDRESS:
c6bdbe11 5230 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
88148277 5231 break;
5232 default:
c6bdbe11 5233 time1 = MAX_RECOG_OPERANDS * 5 + 5;
88148277 5234 }
5235
5236 for (i = 0; i < n_reloads; i++)
5237 {
5238 rtx reg = reload_reg_rtx[i];
5239 if (reg && GET_CODE (reg) == REG
5240 && ((unsigned) regno - true_regnum (reg)
1704bc4a 5241 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
2f013d29 5242 && i != reloadnum)
88148277 5243 {
2f013d29 5244 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
daed075e 5245 || reload_out[i] || out)
88148277 5246 {
2f013d29 5247 int time2;
5248 switch (reload_when_needed[i])
5249 {
5250 case RELOAD_FOR_OTHER_ADDRESS:
5251 time2 = 0;
5252 break;
5253 case RELOAD_FOR_INPADDR_ADDRESS:
93c7b06e 5254 /* find_reloads makes sure that a
5255 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5256 by at most one - the first -
5257 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5258 address reload is inherited, the address address reload
5259 goes away, so we can ignore this conflict. */
c6bdbe11 5260 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5261 && ignore_address_reloads
5262 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5263 Then the address address is still needed to store
5264 back the new address. */
5265 && ! reload_out[reloadnum])
93c7b06e 5266 continue;
c6bdbe11 5267 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5268 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5269 reloads go away. */
5270 if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
5271 && ignore_address_reloads
5272 /* Unless we are reloading an auto_inc expression. */
5273 && ! reload_out[reloadnum])
5274 continue;
5275 time2 = reload_opnum[i] * 4 + 2;
2f013d29 5276 break;
5277 case RELOAD_FOR_INPUT_ADDRESS:
c6bdbe11 5278 if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
5279 && ignore_address_reloads
5280 && ! reload_out[reloadnum])
5281 continue;
5282 time2 = reload_opnum[i] * 4 + 3;
2f013d29 5283 break;
5284 case RELOAD_FOR_INPUT:
c6bdbe11 5285 time2 = reload_opnum[i] * 4 + 4;
2f013d29 5286 break;
c6bdbe11 5287 /* reload_opnum[i] * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5288 == MAX_RECOG_OPERANDS * 4 */
93c7b06e 5289 case RELOAD_FOR_OPADDR_ADDR:
c6bdbe11 5290 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5291 && ignore_address_reloads
5292 && ! reload_out[reloadnum])
93c7b06e 5293 continue;
c6bdbe11 5294 time2 = MAX_RECOG_OPERANDS * 4 + 1;
93c7b06e 5295 break;
5296 case RELOAD_FOR_OPERAND_ADDRESS:
c6bdbe11 5297 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5298 break;
5299 case RELOAD_FOR_INSN:
5300 time2 = MAX_RECOG_OPERANDS * 4 + 3;
93c7b06e 5301 break;
2f013d29 5302 case RELOAD_FOR_OUTPUT:
5303 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5304 instruction is executed. */
c6bdbe11 5305 time2 = MAX_RECOG_OPERANDS * 4 + 4;
2f013d29 5306 break;
c6bdbe11 5307 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5308 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5309 value. */
93c7b06e 5310 case RELOAD_FOR_OUTADDR_ADDRESS:
c6bdbe11 5311 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5312 && ignore_address_reloads
5313 && ! reload_out[reloadnum])
93c7b06e 5314 continue;
c6bdbe11 5315 time2 = MAX_RECOG_OPERANDS * 4 + 4 + reload_opnum[i];
5316 break;
2f013d29 5317 case RELOAD_FOR_OUTPUT_ADDRESS:
c6bdbe11 5318 time2 = MAX_RECOG_OPERANDS * 4 + 5 + reload_opnum[i];
2f013d29 5319 break;
5320 case RELOAD_OTHER:
c6bdbe11 5321 /* If there is no conflict in the input part, handle this
5322 like an output reload. */
2f013d29 5323 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5324 {
c6bdbe11 5325 time2 = MAX_RECOG_OPERANDS * 4 + 4;
2f013d29 5326 break;
5327 }
c6bdbe11 5328 time2 = 1;
5329 /* RELOAD_OTHER might be live beyond instruction execution,
5330 but this is not obvious when we set time2 = 1. So check
5331 here if there might be a problem with the new reload
5332 clobbering the register used by the RELOAD_OTHER. */
5333 if (out)
5334 return 0;
5335 break;
2f013d29 5336 default:
c6bdbe11 5337 return 0;
2f013d29 5338 }
daed075e 5339 if ((time1 >= time2
5340 && (! reload_in[i] || reload_out[i]
5341 || ! rtx_equal_p (reload_in[i], value)))
23ccd46e 5342 || (out && reload_out_reg[reloadnum]
5343 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
2f013d29 5344 return 0;
88148277 5345 }
88148277 5346 }
5347 }
5348 return 1;
5349}
5350
08a9dd06 5351/* Find a spill register to use as a reload register for reload R.
5352 LAST_RELOAD is non-zero if this is the last reload for the insn being
5353 processed.
5354
5355 Set reload_reg_rtx[R] to the register allocated.
5356
5357 If NOERROR is nonzero, we return 1 if successful,
5358 or 0 if we couldn't find a spill reg and we didn't change anything. */
5359
5360static int
590ec786 5361allocate_reload_reg (chain, r, last_reload, noerror)
5362 struct insn_chain *chain;
08a9dd06 5363 int r;
08a9dd06 5364 int last_reload;
5365 int noerror;
5366{
590ec786 5367 rtx insn = chain->insn;
dab171c5 5368 int i, pass, count, regno;
08a9dd06 5369 rtx new;
08a9dd06 5370
5371 /* If we put this reload ahead, thinking it is a group,
5372 then insist on finding a group. Otherwise we can grab a
c8ad158d 5373 reg that some other reload needs.
08a9dd06 5374 (That can happen when we have a 68000 DATA_OR_FP_REG
5375 which is a group of data regs or one fp reg.)
5376 We need not be so restrictive if there are no more reloads
5377 for this insn.
5378
5379 ??? Really it would be nicer to have smarter handling
5380 for that kind of reg class, where a problem like this is normal.
5381 Perhaps those classes should be avoided for reloading
5382 by use of more alternatives. */
5383
5384 int force_group = reload_nregs[r] > 1 && ! last_reload;
5385
5386 /* If we want a single register and haven't yet found one,
5387 take any reg in the right class and not in use.
5388 If we want a consecutive group, here is where we look for it.
5389
5390 We use two passes so we can first look for reload regs to
5391 reuse, which are already in use for other reloads in this insn,
5392 and only then use additional registers.
5393 I think that maximizing reuse is needed to make sure we don't
5394 run out of reload regs. Suppose we have three reloads, and
5395 reloads A and B can share regs. These need two regs.
5396 Suppose A and B are given different regs.
5397 That leaves none for C. */
5398 for (pass = 0; pass < 2; pass++)
5399 {
5400 /* I is the index in spill_regs.
5401 We advance it round-robin between insns to use all spill regs
5402 equally, so that inherited reloads have a chance
28e0535a 5403 of leapfrogging each other. Don't do this, however, when we have
5404 group needs and failure would be fatal; if we only have a relatively
5405 small number of spill registers, and more than one of them has
5406 group needs, then by starting in the middle, we may end up
5407 allocating the first one in such a way that we are not left with
5408 sufficient groups to handle the rest. */
5409
5410 if (noerror || ! force_group)
5411 i = last_spill_reg;
5412 else
5413 i = -1;
5414
5415 for (count = 0; count < n_spills; count++)
08a9dd06 5416 {
5417 int class = (int) reload_reg_class[r];
dab171c5 5418 int regnum;
08a9dd06 5419
dab171c5 5420 i++;
5421 if (i >= n_spills)
5422 i -= n_spills;
5423 regnum = spill_regs[i];
08a9dd06 5424
dab171c5 5425 if ((reload_reg_free_p (regnum, reload_opnum[r],
88148277 5426 reload_when_needed[r])
2f013d29 5427 || (reload_in[r]
88148277 5428 /* We check reload_reg_used to make sure we
5429 don't clobber the return register. */
dab171c5 5430 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5431 && reload_reg_free_for_value_p (regnum,
88148277 5432 reload_opnum[r],
5433 reload_when_needed[r],
2f013d29 5434 reload_in[r],
c6bdbe11 5435 reload_out[r], r, 1)))
dab171c5 5436 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5437 && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
7ac01373 5438 /* Look first for regs to share, then for unshared. But
5439 don't share regs used for inherited reloads; they are
5440 the ones we want to preserve. */
5441 && (pass
5442 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
dab171c5 5443 regnum)
7ac01373 5444 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
dab171c5 5445 regnum))))
08a9dd06 5446 {
dab171c5 5447 int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
08a9dd06 5448 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5449 (on 68000) got us two FP regs. If NR is 1,
5450 we would reject both of them. */
5451 if (force_group)
5452 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5453 /* If we need only one reg, we have already won. */
5454 if (nr == 1)
5455 {
5456 /* But reject a single reg if we demand a group. */
5457 if (force_group)
5458 continue;
5459 break;
5460 }
5461 /* Otherwise check that as many consecutive regs as we need
5462 are available here.
5463 Also, don't use for a group registers that are
5464 needed for nongroups. */
dab171c5 5465 if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
08a9dd06 5466 while (nr > 1)
5467 {
dab171c5 5468 regno = regnum + nr - 1;
08a9dd06 5469 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5470 && spill_reg_order[regno] >= 0
81d0fbb3 5471 && reload_reg_free_p (regno, reload_opnum[r],
5472 reload_when_needed[r])
dab171c5 5473 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
08a9dd06 5474 regno)))
5475 break;
5476 nr--;
5477 }
5478 if (nr == 1)
5479 break;
5480 }
5481 }
5482
5483 /* If we found something on pass 1, omit pass 2. */
5484 if (count < n_spills)
5485 break;
5486 }
5487
5488 /* We should have found a spill register by now. */
5489 if (count == n_spills)
5490 {
5491 if (noerror)
5492 return 0;
3e775099 5493 goto failure;
08a9dd06 5494 }
5495
7ac01373 5496 /* I is the index in SPILL_REG_RTX of the reload register we are to
5497 allocate. Get an rtx for it and find its register number. */
08a9dd06 5498
5499 new = spill_reg_rtx[i];
5500
5501 if (new == 0 || GET_MODE (new) != reload_mode[r])
7ac01373 5502 spill_reg_rtx[i] = new
941522d6 5503 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
7ac01373 5504
08a9dd06 5505 regno = true_regnum (new);
5506
5507 /* Detect when the reload reg can't hold the reload mode.
5508 This used to be one `if', but the Sequent compiler can't handle that. */
5509 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5510 {
5511 enum machine_mode test_mode = VOIDmode;
5512 if (reload_in[r])
5513 test_mode = GET_MODE (reload_in[r]);
5514 /* If reload_in[r] has VOIDmode, it means we will load it
5515 in whatever mode the reload reg has: to wit, reload_mode[r].
5516 We have already tested that for validity. */
5517 /* Aside from that, we need to test that the expressions
5518 to reload from or into have modes which are valid for this
5519 reload register. Otherwise the reload insns would be invalid. */
5520 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5521 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5522 if (! (reload_out[r] != 0
5523 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
7ac01373 5524 {
5525 /* The reg is OK. */
5526 last_spill_reg = i;
5527
5528 /* Mark as in use for this insn the reload regs we use
5529 for this. */
5530 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5531 reload_when_needed[r], reload_mode[r]);
5532
5533 reload_reg_rtx[r] = new;
a5e95c30 5534 reload_spill_index[r] = spill_regs[i];
7ac01373 5535 return 1;
5536 }
08a9dd06 5537 }
5538
5539 /* The reg is not OK. */
5540 if (noerror)
5541 return 0;
5542
3e775099 5543 failure:
08a9dd06 5544 if (asm_noperands (PATTERN (insn)) < 0)
5545 /* It's the compiler's fault. */
ed6a2d98 5546 fatal_insn ("Could not find a spill register", insn);
08a9dd06 5547
5548 /* It's the user's fault; the operand's mode and constraint
5549 don't match. Disable this reload so we don't crash in final. */
5550 error_for_asm (insn,
5551 "`asm' operand constraint incompatible with operand size");
5552 reload_in[r] = 0;
5553 reload_out[r] = 0;
5554 reload_reg_rtx[r] = 0;
5555 reload_optional[r] = 1;
5556 reload_secondary_p[r] = 1;
5557
5558 return 1;
5559}
5560\f
5561/* Assign hard reg targets for the pseudo-registers we must reload
5562 into hard regs for this insn.
5563 Also output the instructions to copy them in and out of the hard regs.
5564
5565 For machines with register classes, we are responsible for
5566 finding a reload reg in the proper class. */
5567
5568static void
dab171c5 5569choose_reload_regs (chain)
590ec786 5570 struct insn_chain *chain;
08a9dd06 5571{
590ec786 5572 rtx insn = chain->insn;
08a9dd06 5573 register int i, j;
5574 int max_group_size = 1;
5575 enum reg_class group_class = NO_REGS;
5576 int inheritance;
93c7b06e 5577 int pass;
08a9dd06 5578
5579 rtx save_reload_reg_rtx[MAX_RELOADS];
5580 char save_reload_inherited[MAX_RELOADS];
5581 rtx save_reload_inheritance_insn[MAX_RELOADS];
5582 rtx save_reload_override_in[MAX_RELOADS];
5583 int save_reload_spill_index[MAX_RELOADS];
5584 HARD_REG_SET save_reload_reg_used;
81d0fbb3 5585 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
9e53d777 5586 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
81d0fbb3 5587 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
9e53d777 5588 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
81d0fbb3 5589 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5590 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
08a9dd06 5591 HARD_REG_SET save_reload_reg_used_in_op_addr;
58866c2c 5592 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
81d0fbb3 5593 HARD_REG_SET save_reload_reg_used_in_insn;
5594 HARD_REG_SET save_reload_reg_used_in_other_addr;
08a9dd06 5595 HARD_REG_SET save_reload_reg_used_at_all;
5596
5597 bzero (reload_inherited, MAX_RELOADS);
748e6d74 5598 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5599 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
08a9dd06 5600
5601 CLEAR_HARD_REG_SET (reload_reg_used);
5602 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
08a9dd06 5603 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
58866c2c 5604 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
81d0fbb3 5605 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5606 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
08a9dd06 5607
9e519b97 5608 CLEAR_HARD_REG_SET (reg_used_in_insn);
5609 {
5610 HARD_REG_SET tmp;
5611 REG_SET_TO_HARD_REG_SET (tmp, chain->live_before);
5612 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5613 REG_SET_TO_HARD_REG_SET (tmp, chain->live_after);
5614 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5615 compute_use_by_pseudos (&reg_used_in_insn, chain->live_before);
5616 compute_use_by_pseudos (&reg_used_in_insn, chain->live_after);
5617 }
81d0fbb3 5618 for (i = 0; i < reload_n_operands; i++)
5619 {
5620 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5621 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5622 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
9e53d777 5623 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
81d0fbb3 5624 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
9e53d777 5625 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
81d0fbb3 5626 }
08a9dd06 5627
dab171c5 5628 IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);
5629
08a9dd06 5630#if 0 /* Not needed, now that we can always retry without inheritance. */
5631 /* See if we have more mandatory reloads than spill regs.
5632 If so, then we cannot risk optimizations that could prevent
c8ad158d 5633 reloads from sharing one spill register.
08a9dd06 5634
5635 Since we will try finding a better register than reload_reg_rtx
5636 unless it is equal to reload_in or reload_out, count such reloads. */
5637
5638 {
dab171c5 5639 int tem = 0;
08a9dd06 5640 for (j = 0; j < n_reloads; j++)
5641 if (! reload_optional[j]
5642 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5643 && (reload_reg_rtx[j] == 0
5644 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5645 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5646 tem++;
5647 if (tem > n_spills)
5648 must_reuse = 1;
5649 }
5650#endif
5651
08a9dd06 5652 /* In order to be certain of getting the registers we need,
5653 we must sort the reloads into order of increasing register class.
5654 Then our grabbing of reload registers will parallel the process
c8ad158d 5655 that provided the reload registers.
08a9dd06 5656
5657 Also note whether any of the reloads wants a consecutive group of regs.
5658 If so, record the maximum size of the group desired and what
5659 register class contains all the groups needed by this insn. */
5660
5661 for (j = 0; j < n_reloads; j++)
5662 {
5663 reload_order[j] = j;
5664 reload_spill_index[j] = -1;
5665
5666 reload_mode[j]
81d0fbb3 5667 = (reload_inmode[j] == VOIDmode
5668 || (GET_MODE_SIZE (reload_outmode[j])
5669 > GET_MODE_SIZE (reload_inmode[j])))
5670 ? reload_outmode[j] : reload_inmode[j];
08a9dd06 5671
5672 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5673
5674 if (reload_nregs[j] > 1)
5675 {
5676 max_group_size = MAX (reload_nregs[j], max_group_size);
5677 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5678 }
5679
5680 /* If we have already decided to use a certain register,
5681 don't use it in another way. */
5682 if (reload_reg_rtx[j])
81d0fbb3 5683 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
08a9dd06 5684 reload_when_needed[j], reload_mode[j]);
5685 }
5686
5687 if (n_reloads > 1)
5688 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5689
748e6d74 5690 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5691 sizeof reload_reg_rtx);
08a9dd06 5692 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
748e6d74 5693 bcopy ((char *) reload_inheritance_insn,
5694 (char *) save_reload_inheritance_insn,
08a9dd06 5695 sizeof reload_inheritance_insn);
748e6d74 5696 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
08a9dd06 5697 sizeof reload_override_in);
748e6d74 5698 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
08a9dd06 5699 sizeof reload_spill_index);
5700 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5701 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
08a9dd06 5702 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5703 reload_reg_used_in_op_addr);
58866c2c 5704
5705 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5706 reload_reg_used_in_op_addr_reload);
5707
81d0fbb3 5708 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5709 reload_reg_used_in_insn);
5710 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5711 reload_reg_used_in_other_addr);
5712
5713 for (i = 0; i < reload_n_operands; i++)
5714 {
5715 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5716 reload_reg_used_in_output[i]);
5717 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5718 reload_reg_used_in_input[i]);
5719 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5720 reload_reg_used_in_input_addr[i]);
9e53d777 5721 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5722 reload_reg_used_in_inpaddr_addr[i]);
81d0fbb3 5723 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5724 reload_reg_used_in_output_addr[i]);
9e53d777 5725 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5726 reload_reg_used_in_outaddr_addr[i]);
81d0fbb3 5727 }
08a9dd06 5728
5ef88ef5 5729 /* If -O, try first with inheritance, then turning it off.
5730 If not -O, don't do inheritance.
5731 Using inheritance when not optimizing leads to paradoxes
5732 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5733 because one side of the comparison might be inherited. */
08a9dd06 5734
5ef88ef5 5735 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
08a9dd06 5736 {
5737 /* Process the reloads in order of preference just found.
5738 Beyond this point, subregs can be found in reload_reg_rtx.
5739
5740 This used to look for an existing reloaded home for all
5741 of the reloads, and only then perform any new reloads.
5742 But that could lose if the reloads were done out of reg-class order
5743 because a later reload with a looser constraint might have an old
5744 home in a register needed by an earlier reload with a tighter constraint.
5745
5746 To solve this, we make two passes over the reloads, in the order
5747 described above. In the first pass we try to inherit a reload
5748 from a previous insn. If there is a later reload that needs a
5749 class that is a proper subset of the class being processed, we must
5750 also allocate a spill register during the first pass.
5751
5752 Then make a second pass over the reloads to allocate any reloads
5753 that haven't been given registers yet. */
5754
7ac01373 5755 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5756
08a9dd06 5757 for (j = 0; j < n_reloads; j++)
5758 {
5759 register int r = reload_order[j];
5760
5761 /* Ignore reloads that got marked inoperative. */
f2a79eb6 5762 if (reload_out[r] == 0 && reload_in[r] == 0
5763 && ! reload_secondary_p[r])
08a9dd06 5764 continue;
5765
c8ff929f 5766 /* If find_reloads chose to use reload_in or reload_out as a reload
f2a79eb6 5767 register, we don't need to choose one. Otherwise, try even if it
5768 found one since we might save an insn if we find the value lying
c8ff929f 5769 around.
5770 Try also when reload_in is a pseudo without a hard reg. */
08a9dd06 5771 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5772 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
c8ff929f 5773 || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
5774 && GET_CODE (reload_in[r]) != MEM
5775 && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
08a9dd06 5776 continue;
5777
5778#if 0 /* No longer needed for correct operation.
5779 It might give better code, or might not; worth an experiment? */
5780 /* If this is an optional reload, we can't inherit from earlier insns
5781 until we are sure that any non-optional reloads have been allocated.
5782 The following code takes advantage of the fact that optional reloads
5783 are at the end of reload_order. */
5784 if (reload_optional[r] != 0)
5785 for (i = 0; i < j; i++)
5786 if ((reload_out[reload_order[i]] != 0
5787 || reload_in[reload_order[i]] != 0
5788 || reload_secondary_p[reload_order[i]])
5789 && ! reload_optional[reload_order[i]]
5790 && reload_reg_rtx[reload_order[i]] == 0)
590ec786 5791 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
08a9dd06 5792#endif
5793
5794 /* First see if this pseudo is already available as reloaded
5795 for a previous insn. We cannot try to inherit for reloads
5796 that are smaller than the maximum number of registers needed
5797 for groups unless the register we would allocate cannot be used
5798 for the groups.
5799
5800 We could check here to see if this is a secondary reload for
5801 an object that is already in a register of the desired class.
5802 This would avoid the need for the secondary reload register.
5803 But this is complex because we can't easily determine what
f2a79eb6 5804 objects might want to be loaded via this reload. So let a
5805 register be allocated here. In `emit_reload_insns' we suppress
5806 one of the loads in the case described above. */
08a9dd06 5807
5808 if (inheritance)
5809 {
93c7b06e 5810 int word = 0;
08a9dd06 5811 register int regno = -1;
bab79d5d 5812 enum machine_mode mode;
08a9dd06 5813
5814 if (reload_in[r] == 0)
5815 ;
5816 else if (GET_CODE (reload_in[r]) == REG)
bab79d5d 5817 {
5818 regno = REGNO (reload_in[r]);
5819 mode = GET_MODE (reload_in[r]);
5820 }
08a9dd06 5821 else if (GET_CODE (reload_in_reg[r]) == REG)
bab79d5d 5822 {
5823 regno = REGNO (reload_in_reg[r]);
5824 mode = GET_MODE (reload_in_reg[r]);
5825 }
93c7b06e 5826 else if (GET_CODE (reload_in_reg[r]) == SUBREG
5827 && GET_CODE (SUBREG_REG (reload_in_reg[r])) == REG)
6aed5e78 5828 {
93c7b06e 5829 word = SUBREG_WORD (reload_in_reg[r]);
5830 regno = REGNO (SUBREG_REG (reload_in_reg[r]));
5831 if (regno < FIRST_PSEUDO_REGISTER)
5832 regno += word;
5833 mode = GET_MODE (reload_in_reg[r]);
5834 }
5835#ifdef AUTO_INC_DEC
5836 else if ((GET_CODE (reload_in_reg[r]) == PRE_INC
5837 || GET_CODE (reload_in_reg[r]) == PRE_DEC
5838 || GET_CODE (reload_in_reg[r]) == POST_INC
5839 || GET_CODE (reload_in_reg[r]) == POST_DEC)
5840 && GET_CODE (XEXP (reload_in_reg[r], 0)) == REG)
5841 {
5842 regno = REGNO (XEXP (reload_in_reg[r], 0));
5843 mode = GET_MODE (XEXP (reload_in_reg[r], 0));
5844 reload_out[r] = reload_in[r];
6aed5e78 5845 }
93c7b06e 5846#endif
08a9dd06 5847#if 0
5848 /* This won't work, since REGNO can be a pseudo reg number.
5849 Also, it takes much more hair to keep track of all the things
5850 that can invalidate an inherited reload of part of a pseudoreg. */
5851 else if (GET_CODE (reload_in[r]) == SUBREG
5852 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5853 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5854#endif
5855
5856 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5857 {
93c7b06e 5858 enum reg_class class = reload_reg_class[r], last_class;
5859 rtx last_reg = reg_last_reload_reg[regno];
5860
5861 i = REGNO (last_reg) + word;
5862 last_class = REGNO_REG_CLASS (i);
5863 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5864 >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
5865 && reg_reloaded_contents[i] == regno
a5e95c30 5866 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
a5e95c30 5867 && HARD_REGNO_MODE_OK (i, reload_mode[r])
93c7b06e 5868 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5869 /* Even if we can't use this register as a reload
5870 register, we might use it for reload_override_in,
5871 if copying it to the desired class is cheap
5872 enough. */
5873 || ((REGISTER_MOVE_COST (last_class, class)
5874 < MEMORY_MOVE_COST (mode, class, 1))
5875#ifdef SECONDARY_INPUT_RELOAD_CLASS
5876 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5877 last_reg)
5878 == NO_REGS)
5879#endif
5880#ifdef SECONDARY_MEMORY_NEEDED
5881 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5882 mode)
5883#endif
5884 ))
5885
08a9dd06 5886 && (reload_nregs[r] == max_group_size
5887 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
a5e95c30 5888 i))
c6bdbe11 5889 && reload_reg_free_for_value_p (i, reload_opnum[r],
5890 reload_when_needed[r],
5891 reload_in[r],
5892 const0_rtx, r, 1))
08a9dd06 5893 {
5894 /* If a group is needed, verify that all the subsequent
a92771b8 5895 registers still have their values intact. */
08a9dd06 5896 int nr
a5e95c30 5897 = HARD_REGNO_NREGS (i, reload_mode[r]);
08a9dd06 5898 int k;
5899
5900 for (k = 1; k < nr; k++)
a5e95c30 5901 if (reg_reloaded_contents[i + k] != regno
5902 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
08a9dd06 5903 break;
5904
5905 if (k == nr)
5906 {
99a8d31a 5907 int i1;
5908
93c7b06e 5909 last_reg = (GET_MODE (last_reg) == mode
5910 ? last_reg : gen_rtx_REG (mode, i));
5911
99a8d31a 5912 /* We found a register that contains the
5913 value we need. If this register is the
5914 same as an `earlyclobber' operand of the
5915 current insn, just mark it as a place to
5916 reload from since we can't use it as the
5917 reload register itself. */
5918
5919 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5920 if (reg_overlap_mentioned_for_reload_p
5921 (reg_last_reload_reg[regno],
5922 reload_earlyclobbers[i1]))
5923 break;
5924
c06cbe22 5925 if (i1 != n_earlyclobbers
c6bdbe11 5926 || ! (reload_reg_free_for_value_p
5927 (i, reload_opnum[r], reload_when_needed[r],
5928 reload_in[r], reload_out[r], r, 1))
a5e95c30 5929 /* Don't use it if we'd clobber a pseudo reg. */
9e519b97 5930 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
a5e95c30 5931 && reload_out[r]
5932 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
c06cbe22 5933 /* Don't really use the inherited spill reg
5934 if we need it wider than we've got it. */
5935 || (GET_MODE_SIZE (reload_mode[r])
c8ff929f 5936 > GET_MODE_SIZE (mode))
93c7b06e 5937 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5938 i)
5939
c8ff929f 5940 /* If find_reloads chose reload_out as reload
5941 register, stay with it - that leaves the
5942 inherited register for subsequent reloads. */
88ce984a 5943 || (reload_out[r] && reload_reg_rtx[r]
c8ff929f 5944 && rtx_equal_p (reload_out[r],
5945 reload_reg_rtx[r])))
93c7b06e 5946 {
5947 reload_override_in[r] = last_reg;
5948 reload_inheritance_insn[r]
5949 = reg_reloaded_insn[i];
5950 }
99a8d31a 5951 else
5952 {
a24e1dd2 5953 int k;
99a8d31a 5954 /* We can use this as a reload reg. */
5955 /* Mark the register as in use for this part of
5956 the insn. */
a5e95c30 5957 mark_reload_reg_in_use (i,
99a8d31a 5958 reload_opnum[r],
5959 reload_when_needed[r],
5960 reload_mode[r]);
93c7b06e 5961 reload_reg_rtx[r] = last_reg;
99a8d31a 5962 reload_inherited[r] = 1;
5963 reload_inheritance_insn[r]
5964 = reg_reloaded_insn[i];
5965 reload_spill_index[r] = i;
a24e1dd2 5966 for (k = 0; k < nr; k++)
5967 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
a5e95c30 5968 i + k);
99a8d31a 5969 }
08a9dd06 5970 }
5971 }
5972 }
5973 }
5974
5975 /* Here's another way to see if the value is already lying around. */
5976 if (inheritance
5977 && reload_in[r] != 0
5978 && ! reload_inherited[r]
5979 && reload_out[r] == 0
5980 && (CONSTANT_P (reload_in[r])
5981 || GET_CODE (reload_in[r]) == PLUS
5982 || GET_CODE (reload_in[r]) == REG
5983 || GET_CODE (reload_in[r]) == MEM)
5984 && (reload_nregs[r] == max_group_size
5985 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5986 {
5987 register rtx equiv
5988 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
e5fdd564 5989 -1, NULL_PTR, 0, reload_mode[r]);
08a9dd06 5990 int regno;
5991
5992 if (equiv != 0)
5993 {
5994 if (GET_CODE (equiv) == REG)
5995 regno = REGNO (equiv);
5996 else if (GET_CODE (equiv) == SUBREG)
5997 {
ef5b5374 5998 /* This must be a SUBREG of a hard register.
5999 Make a new REG since this might be used in an
6000 address and not all machines support SUBREGs
6001 there. */
6002 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
941522d6 6003 equiv = gen_rtx_REG (reload_mode[r], regno);
08a9dd06 6004 }
6005 else
6006 abort ();
6007 }
6008
6009 /* If we found a spill reg, reject it unless it is free
6010 and of the desired class. */
6011 if (equiv != 0
93c7b06e 6012 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
6013 && ! reload_reg_free_for_value_p (regno, reload_opnum[r],
6014 reload_when_needed[r],
6015 reload_in[r],
c6bdbe11 6016 reload_out[r], r, 1))
08a9dd06 6017 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
6018 regno)))
6019 equiv = 0;
6020
08a9dd06 6021 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
6022 equiv = 0;
6023
6024 /* We found a register that contains the value we need.
6025 If this register is the same as an `earlyclobber' operand
6026 of the current insn, just mark it as a place to reload from
6027 since we can't use it as the reload register itself. */
6028
6029 if (equiv != 0)
6030 for (i = 0; i < n_earlyclobbers; i++)
d3862ba2 6031 if (reg_overlap_mentioned_for_reload_p (equiv,
6032 reload_earlyclobbers[i]))
08a9dd06 6033 {
6034 reload_override_in[r] = equiv;
6035 equiv = 0;
6036 break;
6037 }
6038
ec89129f 6039 /* If the equiv register we have found is explicitly clobbered
6040 in the current insn, it depends on the reload type if we
6041 can use it, use it for reload_override_in, or not at all.
6042 In particular, we then can't use EQUIV for a
6043 RELOAD_FOR_OUTPUT_ADDRESS reload. */
08a9dd06 6044
6045 if (equiv != 0 && regno_clobbered_p (regno, insn))
6046 {
ec89129f 6047 switch (reload_when_needed[r])
6048 {
6049 case RELOAD_FOR_OTHER_ADDRESS:
6050 case RELOAD_FOR_INPADDR_ADDRESS:
6051 case RELOAD_FOR_INPUT_ADDRESS:
6052 case RELOAD_FOR_OPADDR_ADDR:
6053 break;
6054 case RELOAD_OTHER:
6055 case RELOAD_FOR_INPUT:
6056 case RELOAD_FOR_OPERAND_ADDRESS:
6057 reload_override_in[r] = equiv;
6058 /* Fall through. */
6059 default:
6060 equiv = 0;
6061 break;
6062 }
08a9dd06 6063 }
6064
6065 /* If we found an equivalent reg, say no code need be generated
6066 to load it, and use it as our reload reg. */
ecbd56da 6067 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
08a9dd06 6068 {
7786d760 6069 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
6070 int k;
08a9dd06 6071 reload_reg_rtx[r] = equiv;
6072 reload_inherited[r] = 1;
7786d760 6073
bd24587d 6074 /* If reg_reloaded_valid is not set for this register,
6075 there might be a stale spill_reg_store lying around.
6076 We must clear it, since otherwise emit_reload_insns
6077 might delete the store. */
6078 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6079 spill_reg_store[regno] = NULL_RTX;
7786d760 6080 /* If any of the hard registers in EQUIV are spill
6081 registers, mark them as in use for this insn. */
6082 for (k = 0; k < nr; k++)
7ac01373 6083 {
7786d760 6084 i = spill_reg_order[regno + k];
6085 if (i >= 0)
6086 {
6087 mark_reload_reg_in_use (regno, reload_opnum[r],
6088 reload_when_needed[r],
6089 reload_mode[r]);
6090 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6091 regno + k);
6092 }
7ac01373 6093 }
08a9dd06 6094 }
6095 }
6096
6097 /* If we found a register to use already, or if this is an optional
6098 reload, we are done. */
6099 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
6100 continue;
6101
6102#if 0 /* No longer needed for correct operation. Might or might not
6103 give better code on the average. Want to experiment? */
6104
6105 /* See if there is a later reload that has a class different from our
 6106 class that intersects our class or that requires fewer registers
6107 than our reload. If so, we must allocate a register to this
6108 reload now, since that reload might inherit a previous reload
6109 and take the only available register in our class. Don't do this
6110 for optional reloads since they will force all previous reloads
6111 to be allocated. Also don't do this for reloads that have been
6112 turned off. */
6113
6114 for (i = j + 1; i < n_reloads; i++)
6115 {
6116 int s = reload_order[i];
6117
f9e15121 6118 if ((reload_in[s] == 0 && reload_out[s] == 0
6119 && ! reload_secondary_p[s])
08a9dd06 6120 || reload_optional[s])
6121 continue;
6122
6123 if ((reload_reg_class[s] != reload_reg_class[r]
6124 && reg_classes_intersect_p (reload_reg_class[r],
6125 reload_reg_class[s]))
6126 || reload_nregs[s] < reload_nregs[r])
6127 break;
6128 }
6129
6130 if (i == n_reloads)
6131 continue;
6132
590ec786 6133 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
08a9dd06 6134#endif
6135 }
6136
6137 /* Now allocate reload registers for anything non-optional that
6138 didn't get one yet. */
6139 for (j = 0; j < n_reloads; j++)
6140 {
6141 register int r = reload_order[j];
6142
6143 /* Ignore reloads that got marked inoperative. */
6144 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
6145 continue;
6146
6147 /* Skip reloads that already have a register allocated or are
a92771b8 6148 optional. */
08a9dd06 6149 if (reload_reg_rtx[r] != 0 || reload_optional[r])
6150 continue;
6151
590ec786 6152 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
08a9dd06 6153 break;
6154 }
6155
6156 /* If that loop got all the way, we have won. */
6157 if (j == n_reloads)
6158 break;
6159
08a9dd06 6160 /* Loop around and try without any inheritance. */
6161 /* First undo everything done by the failed attempt
6162 to allocate with inheritance. */
748e6d74 6163 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
6164 sizeof reload_reg_rtx);
6165 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
6166 sizeof reload_inherited);
6167 bcopy ((char *) save_reload_inheritance_insn,
6168 (char *) reload_inheritance_insn,
08a9dd06 6169 sizeof reload_inheritance_insn);
748e6d74 6170 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
08a9dd06 6171 sizeof reload_override_in);
748e6d74 6172 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
08a9dd06 6173 sizeof reload_spill_index);
6174 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
6175 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
08a9dd06 6176 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
6177 save_reload_reg_used_in_op_addr);
58866c2c 6178 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
6179 save_reload_reg_used_in_op_addr_reload);
81d0fbb3 6180 COPY_HARD_REG_SET (reload_reg_used_in_insn,
6181 save_reload_reg_used_in_insn);
6182 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
6183 save_reload_reg_used_in_other_addr);
6184
6185 for (i = 0; i < reload_n_operands; i++)
6186 {
6187 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
6188 save_reload_reg_used_in_input[i]);
6189 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
6190 save_reload_reg_used_in_output[i]);
6191 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
6192 save_reload_reg_used_in_input_addr[i]);
9e53d777 6193 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
6194 save_reload_reg_used_in_inpaddr_addr[i]);
81d0fbb3 6195 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
6196 save_reload_reg_used_in_output_addr[i]);
9e53d777 6197 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
6198 save_reload_reg_used_in_outaddr_addr[i]);
81d0fbb3 6199 }
08a9dd06 6200 }
6201
6202 /* If we thought we could inherit a reload, because it seemed that
6203 nothing else wanted the same reload register earlier in the insn,
93c7b06e 6204 verify that assumption, now that all reloads have been assigned.
6205 Likewise for reloads where reload_override_in has been set. */
08a9dd06 6206
93c7b06e 6207 /* If doing expensive optimizations, do one preliminary pass that doesn't
6208 cancel any inheritance, but removes reloads that have been needed only
6209 for reloads that we know can be inherited. */
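  /* Here a nonzero PASS is the preliminary pass and PASS == 0 is the final
     pass that may also cancel inheritance; setting PASS to 2 below forces
     another preliminary iteration after a reload has been removed.  */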
6210 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
08a9dd06 6211 {
93c7b06e 6212 for (j = 0; j < n_reloads; j++)
524613c6 6213 {
93c7b06e 6214 register int r = reload_order[j];
6215 rtx check_reg;
93c7b06e 6216 if (reload_inherited[r] && reload_reg_rtx[r])
6217 check_reg = reload_reg_rtx[r];
6218 else if (reload_override_in[r]
6219 && (GET_CODE (reload_override_in[r]) == REG
6220 || GET_CODE (reload_override_in[r]) == SUBREG))
6221 check_reg = reload_override_in[r];
6222 else
6223 continue;
c6bdbe11 6224 if (! reload_reg_free_for_value_p (true_regnum (check_reg),
93c7b06e 6225 reload_opnum[r],
6226 reload_when_needed[r],
6227 reload_in[r],
c6bdbe11 6228 (reload_inherited[r]
6229 ? reload_out[r] : const0_rtx),
6230 r, 1))
524613c6 6231 {
93c7b06e 6232 if (pass)
6233 continue;
6234 reload_inherited[r] = 0;
6235 reload_override_in[r] = 0;
524613c6 6236 }
93c7b06e 6237 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6238 reload_override_in, then we do not need its related
6239 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6240 likewise for other reload types.
6241 We handle this by removing a reload when its only replacement
6242 is mentioned in reload_in of the reload we are going to inherit.
 6243 A special case is auto_inc expressions; even if the input is
6244 inherited, we still need the address for the output. We can
6245 recognize them because they have RELOAD_OUT set but not
6246 RELOAD_OUT_REG.
 6247 If we succeeded in removing some reload and we are doing a preliminary
6248 pass just to remove such reloads, make another pass, since the
6249 removal of one reload might allow us to inherit another one. */
6250 else if ((! reload_out[r] || reload_out_reg[r])
6251 && remove_address_replacements (reload_in[r]) && pass)
6252 pass = 2;
08a9dd06 6253 }
6254 }
6255
6256 /* Now that reload_override_in is known valid,
6257 actually override reload_in. */
6258 for (j = 0; j < n_reloads; j++)
6259 if (reload_override_in[j])
6260 reload_in[j] = reload_override_in[j];
6261
6262 /* If this reload won't be done because it has been cancelled or is
6263 optional and not inherited, clear reload_reg_rtx so other
6264 routines (such as subst_reloads) don't get confused. */
6265 for (j = 0; j < n_reloads; j++)
7ac01373 6266 if (reload_reg_rtx[j] != 0
6267 && ((reload_optional[j] && ! reload_inherited[j])
6268 || (reload_in[j] == 0 && reload_out[j] == 0
6269 && ! reload_secondary_p[j])))
6270 {
6271 int regno = true_regnum (reload_reg_rtx[j]);
6272
6273 if (spill_reg_order[regno] >= 0)
6274 clear_reload_reg_in_use (regno, reload_opnum[j],
6275 reload_when_needed[j], reload_mode[j]);
6276 reload_reg_rtx[j] = 0;
6277 }
08a9dd06 6278
6279 /* Record which pseudos and which spill regs have output reloads. */
6280 for (j = 0; j < n_reloads; j++)
6281 {
6282 register int r = reload_order[j];
6283
6284 i = reload_spill_index[r];
6285
a5e95c30 6286 /* I is nonneg if this reload uses a register.
08a9dd06 6287 If reload_reg_rtx[r] is 0, this is an optional reload
6288 that we opted to ignore. */
93c7b06e 6289 if (reload_out_reg[r] != 0 && GET_CODE (reload_out_reg[r]) == REG
08a9dd06 6290 && reload_reg_rtx[r] != 0)
6291 {
93c7b06e 6292 register int nregno = REGNO (reload_out_reg[r]);
4ddbcdd2 6293 int nr = 1;
6294
6295 if (nregno < FIRST_PSEUDO_REGISTER)
6296 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
08a9dd06 6297
6298 while (--nr >= 0)
4ddbcdd2 6299 reg_has_output_reload[nregno + nr] = 1;
6300
6301 if (i >= 0)
08a9dd06 6302 {
a5e95c30 6303 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
4ddbcdd2 6304 while (--nr >= 0)
a5e95c30 6305 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
08a9dd06 6306 }
6307
6308 if (reload_when_needed[r] != RELOAD_OTHER
81d0fbb3 6309 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6310 && reload_when_needed[r] != RELOAD_FOR_INSN)
08a9dd06 6311 abort ();
6312 }
6313 }
6314}
93c7b06e 6315
6316/* Deallocate the reload register for reload R. This is called from
6317 remove_address_replacements. */
6318void
6319deallocate_reload_reg (r)
6320 int r;
6321{
6322 int regno;
6323
6324 if (! reload_reg_rtx[r])
6325 return;
6326 regno = true_regnum (reload_reg_rtx[r]);
6327 reload_reg_rtx[r] = 0;
6328 if (spill_reg_order[regno] >= 0)
6329 clear_reload_reg_in_use (regno, reload_opnum[r], reload_when_needed[r],
6330 reload_mode[r]);
6331 reload_spill_index[r] = -1;
6332}
08a9dd06 6333\f
0dbd1c74 6334/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
81d0fbb3 6335 reloads of the same item for fear that we might not have enough reload
6336 registers. However, normally they will get the same reload register
6337 and hence actually need not be loaded twice.
6338
6339 Here we check for the most common case of this phenomenon: when we have
 6340 a number of reloads for the same object, each of which was allocated
6341 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6342 reload, and is not modified in the insn itself. If we find such,
6343 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6344 This will not increase the number of spill registers needed and will
6345 prevent redundant code. */
6346
81d0fbb3 6347static void
6348merge_assigned_reloads (insn)
6349 rtx insn;
6350{
6351 int i, j;
6352
6353 /* Scan all the reloads looking for ones that only load values and
6354 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6355 assigned and not modified by INSN. */
6356
6357 for (i = 0; i < n_reloads; i++)
6358 {
bb89c1cf 6359 int conflicting_input = 0;
6360 int max_input_address_opnum = -1;
6361 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6362
81d0fbb3 6363 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6364 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6365 || reg_set_p (reload_reg_rtx[i], insn))
6366 continue;
6367
6368 /* Look at all other reloads. Ensure that the only use of this
6369 reload_reg_rtx is in a reload that just loads the same value
6370 as we do. Note that any secondary reloads must be of the identical
6371 class since the values, modes, and result registers are the
6372 same, so we need not do anything with any secondary reloads. */
6373
6374 for (j = 0; j < n_reloads; j++)
6375 {
6376 if (i == j || reload_reg_rtx[j] == 0
6377 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6378 reload_reg_rtx[i]))
6379 continue;
6380
bb89c1cf 6381 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6382 && reload_opnum[j] > max_input_address_opnum)
6383 max_input_address_opnum = reload_opnum[j];
6384
81d0fbb3 6385 /* If the reload regs aren't exactly the same (e.g., different modes)
bb89c1cf 6386 or if the values are different, we can't merge this reload.
6387 But if it is an input reload, we might still merge
6388 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
81d0fbb3 6389
6390 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6391 || reload_out[j] != 0 || reload_in[j] == 0
6392 || ! rtx_equal_p (reload_in[i], reload_in[j]))
bb89c1cf 6393 {
6394 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6395 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6396 || reload_opnum[i] > reload_opnum[j])
6397 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6398 break;
6399 conflicting_input = 1;
6400 if (min_conflicting_input_opnum > reload_opnum[j])
6401 min_conflicting_input_opnum = reload_opnum[j];
6402 }
81d0fbb3 6403 }
6404
6405 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6406 we, in fact, found any matching reloads. */
6407
bb89c1cf 6408 if (j == n_reloads
6409 && max_input_address_opnum <= min_conflicting_input_opnum)
81d0fbb3 6410 {
6411 for (j = 0; j < n_reloads; j++)
6412 if (i != j && reload_reg_rtx[j] != 0
bb89c1cf 6413 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6414 && (! conflicting_input
6415 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6416 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
81d0fbb3 6417 {
6418 reload_when_needed[i] = RELOAD_OTHER;
6419 reload_in[j] = 0;
d70eda17 6420 reload_spill_index[j] = -1;
81d0fbb3 6421 transfer_replacements (i, j);
6422 }
6423
6424 /* If this is now RELOAD_OTHER, look for any reloads that load
6425 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6426 if they were for inputs, RELOAD_OTHER for outputs. Note that
6427 this test is equivalent to looking for reloads for this operand
6428 number. */
6429
6430 if (reload_when_needed[i] == RELOAD_OTHER)
6431 for (j = 0; j < n_reloads; j++)
6432 if (reload_in[j] != 0
6433 && reload_when_needed[i] != RELOAD_OTHER
6434 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6435 reload_in[i]))
6436 reload_when_needed[j]
9e53d777 6437 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6438 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6439 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
81d0fbb3 6440 }
6441 }
6442}
0dbd1c74 6443
81d0fbb3 6444\f
08a9dd06 6445/* Output insns to reload values in and out of the chosen reload regs. */
6446
6447static void
590ec786 6448emit_reload_insns (chain)
6449 struct insn_chain *chain;
08a9dd06 6450{
590ec786 6451 rtx insn = chain->insn;
6452
08a9dd06 6453 register int j;
81d0fbb3 6454 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6455 rtx other_input_address_reload_insns = 0;
6456 rtx other_input_reload_insns = 0;
6457 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
9e53d777 6458 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
81d0fbb3 6459 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6460 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
9e53d777 6461 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
81d0fbb3 6462 rtx operand_reload_insns = 0;
58866c2c 6463 rtx other_operand_reload_insns = 0;
1dd0083e 6464 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
08a9dd06 6465 rtx following_insn = NEXT_INSN (insn);
2a588794 6466 rtx before_insn = PREV_INSN (insn);
08a9dd06 6467 int special;
6468 /* Values to be put in spill_reg_store are put here first. */
6469 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
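  /* Hard regs for which a REG_DEAD note is seen while emitting this insn's
     reloads; merged into reg_reloaded_dead once all reloads are done.  */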
a5e95c30 6470 HARD_REG_SET reg_reloaded_died;
6471
6472 CLEAR_HARD_REG_SET (reg_reloaded_died);
08a9dd06 6473
81d0fbb3 6474 for (j = 0; j < reload_n_operands; j++)
6475 input_reload_insns[j] = input_address_reload_insns[j]
9e53d777 6476 = inpaddr_address_reload_insns[j]
1dd0083e 6477 = output_reload_insns[j] = output_address_reload_insns[j]
9e53d777 6478 = outaddr_address_reload_insns[j]
1dd0083e 6479 = other_output_reload_insns[j] = 0;
81d0fbb3 6480
08a9dd06 6481 /* Now output the instructions to copy the data into and out of the
6482 reload registers. Do these in the order that the reloads were reported,
6483 since reloads of base and index registers precede reloads of operands
6484 and the operands may need the base and index registers reloaded. */
6485
6486 for (j = 0; j < n_reloads; j++)
6487 {
6488 register rtx old;
6489 rtx oldequiv_reg = 0;
83bfaafc 6490 rtx this_reload_insn = 0;
6aed5e78 6491 int expect_occurrences = 1;
78737868 6492
93c7b06e 6493 if (reload_reg_rtx[j]
6494 && REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
6495 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = 0;
08a9dd06 6496
93c7b06e 6497 old = (reload_in[j] && GET_CODE (reload_in[j]) == MEM
6498 ? reload_in_reg[j] : reload_in[j]);
6499
6500 if (old != 0
6501 /* AUTO_INC reloads need to be handled even if inherited. We got an
6502 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6503 && (! reload_inherited[j] || (reload_out[j] && ! reload_out_reg[j]))
08a9dd06 6504 && ! rtx_equal_p (reload_reg_rtx[j], old)
6505 && reload_reg_rtx[j] != 0)
6506 {
6507 register rtx reloadreg = reload_reg_rtx[j];
6508 rtx oldequiv = 0;
6509 enum machine_mode mode;
81d0fbb3 6510 rtx *where;
08a9dd06 6511
6512 /* Determine the mode to reload in.
6513 This is very tricky because we have three to choose from.
6514 There is the mode the insn operand wants (reload_inmode[J]).
6515 There is the mode of the reload register RELOADREG.
6516 There is the intrinsic mode of the operand, which we could find
6517 by stripping some SUBREGs.
6518 It turns out that RELOADREG's mode is irrelevant:
6519 we can change that arbitrarily.
6520
6521 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6522 then the reload reg may not support QImode moves, so use SImode.
6523 If foo is in memory due to spilling a pseudo reg, this is safe,
6524 because the QImode value is in the least significant part of a
6525 slot big enough for a SImode. If foo is some other sort of
6526 memory reference, then it is impossible to reload this case,
6527 so previous passes had better make sure this never happens.
6528
6529 Then consider a one-word union which has SImode and one of its
6530 members is a float, being fetched as (SUBREG:SF union:SI).
6531 We must fetch that as SFmode because we could be loading into
6532 a float-only register. In this case OLD's mode is correct.
6533
6534 Consider an immediate integer: it has VOIDmode. Here we need
6535 to get a mode from something else.
6536
6537 In some cases, there is a fourth mode, the operand's
6538 containing mode. If the insn specifies a containing mode for
6539 this operand, it overrides all others.
6540
6541 I am not sure whether the algorithm here is always right,
6542 but it does the right things in those cases. */
6543
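	  /* Concretely, the code below uses the operand's own mode unless it
	     is VOIDmode (e.g. a constant), in which case it falls back to the
	     mode the insn operand wants, reload_inmode[j].  */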
6544 mode = GET_MODE (old);
6545 if (mode == VOIDmode)
6546 mode = reload_inmode[j];
08a9dd06 6547
6548#ifdef SECONDARY_INPUT_RELOAD_CLASS
6549 /* If we need a secondary register for this operation, see if
6550 the value is already in a register in that class. Don't
6551 do this if the secondary register will be used as a scratch
6552 register. */
6553
46f00337 6554 if (reload_secondary_in_reload[j] >= 0
6555 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5ef88ef5 6556 && optimize)
08a9dd06 6557 oldequiv
6558 = find_equiv_reg (old, insn,
46f00337 6559 reload_reg_class[reload_secondary_in_reload[j]],
e5fdd564 6560 -1, NULL_PTR, 0, mode);
08a9dd06 6561#endif
6562
6563 /* If reloading from memory, see if there is a register
6564 that already holds the same value. If so, reload from there.
6565 We can pass 0 as the reload_reg_p argument because
6566 any other reload has either already been emitted,
6567 in which case find_equiv_reg will see the reload-insn,
6568 or has yet to be emitted, in which case it doesn't matter
6569 because we will use this equiv reg right away. */
6570
5ef88ef5 6571 if (oldequiv == 0 && optimize
08a9dd06 6572 && (GET_CODE (old) == MEM
6573 || (GET_CODE (old) == REG
6574 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6575 && reg_renumber[REGNO (old)] < 0)))
81d0fbb3 6576 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
e5fdd564 6577 -1, NULL_PTR, 0, mode);
08a9dd06 6578
6579 if (oldequiv)
6580 {
6581 int regno = true_regnum (oldequiv);
6582
c6bdbe11 6583 /* Don't use OLDEQUIV if any other reload changes it at an
6584 earlier stage of this insn or at this stage. */
6585 if (! reload_reg_free_for_value_p (regno, reload_opnum[j],
6586 reload_when_needed[j],
6587 reload_in[j], const0_rtx, j,
6588 0))
08a9dd06 6589 oldequiv = 0;
6590
81d0fbb3 6591 /* If it is no cheaper to copy from OLDEQUIV into the
6592 reload register than it would be to move from memory,
6593 don't use it. Likewise, if we need a secondary register
6594 or memory. */
6595
6596 if (oldequiv != 0
6597 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6598 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6599 reload_reg_class[j])
906a8d5b 6600 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
81d0fbb3 6601#ifdef SECONDARY_INPUT_RELOAD_CLASS
6602 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6603 mode, oldequiv)
6604 != NO_REGS)
6605#endif
6606#ifdef SECONDARY_MEMORY_NEEDED
906a8d5b 6607 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6608 reload_reg_class[j],
81d0fbb3 6609 mode)
6610#endif
6611 ))
6612 oldequiv = 0;
08a9dd06 6613 }
6614
93c7b06e 6615 /* delete_output_reload is only invoked properly if old contains
6616 the original pseudo register. Since this is replaced with a
6617 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6618 find the pseudo in RELOAD_IN_REG. */
6619 if (oldequiv == 0
6620 && reload_override_in[j]
6621 && GET_CODE (reload_in_reg[j]) == REG)
6622 {
6623 oldequiv = old;
6624 old = reload_in_reg[j];
6625 }
08a9dd06 6626 if (oldequiv == 0)
6627 oldequiv = old;
6628 else if (GET_CODE (oldequiv) == REG)
6629 oldequiv_reg = oldequiv;
6630 else if (GET_CODE (oldequiv) == SUBREG)
6631 oldequiv_reg = SUBREG_REG (oldequiv);
6632
4f022f61 6633 /* If we are reloading from a register that was recently stored in
6634 with an output-reload, see if we can prove there was
6635 actually no need to store the old value in it. */
6636
6637 if (optimize && GET_CODE (oldequiv) == REG
6638 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
a5e95c30 6639 && spill_reg_store[REGNO (oldequiv)]
93c7b06e 6640 && GET_CODE (old) == REG
6641 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6642 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6643 reload_out_reg[j])))
6644 delete_output_reload (insn, j, REGNO (oldequiv));
4f022f61 6645
08a9dd06 6646 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
a35a3d54 6647 then load RELOADREG from OLDEQUIV. Note that we cannot use
6648 gen_lowpart_common since it can do the wrong thing when
6649 RELOADREG has a multi-word mode. Note that RELOADREG
6650 must always be a REG here. */
08a9dd06 6651
6652 if (GET_MODE (reloadreg) != mode)
941522d6 6653 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
08a9dd06 6654 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6655 oldequiv = SUBREG_REG (oldequiv);
6656 if (GET_MODE (oldequiv) != VOIDmode
6657 && mode != GET_MODE (oldequiv))
941522d6 6658 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
08a9dd06 6659
81d0fbb3 6660 /* Switch to the right place to emit the reload insns. */
08a9dd06 6661 switch (reload_when_needed[j])
6662 {
08a9dd06 6663 case RELOAD_OTHER:
81d0fbb3 6664 where = &other_input_reload_insns;
6665 break;
6666 case RELOAD_FOR_INPUT:
6667 where = &input_reload_insns[reload_opnum[j]];
08a9dd06 6668 break;
81d0fbb3 6669 case RELOAD_FOR_INPUT_ADDRESS:
6670 where = &input_address_reload_insns[reload_opnum[j]];
08a9dd06 6671 break;
9e53d777 6672 case RELOAD_FOR_INPADDR_ADDRESS:
6673 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6674 break;
81d0fbb3 6675 case RELOAD_FOR_OUTPUT_ADDRESS:
6676 where = &output_address_reload_insns[reload_opnum[j]];
08a9dd06 6677 break;
9e53d777 6678 case RELOAD_FOR_OUTADDR_ADDRESS:
6679 where = &outaddr_address_reload_insns[reload_opnum[j]];
6680 break;
08a9dd06 6681 case RELOAD_FOR_OPERAND_ADDRESS:
81d0fbb3 6682 where = &operand_reload_insns;
6683 break;
58866c2c 6684 case RELOAD_FOR_OPADDR_ADDR:
6685 where = &other_operand_reload_insns;
6686 break;
81d0fbb3 6687 case RELOAD_FOR_OTHER_ADDRESS:
6688 where = &other_input_address_reload_insns;
6689 break;
6690 default:
6691 abort ();
08a9dd06 6692 }
6693
81d0fbb3 6694 push_to_sequence (*where);
08a9dd06 6695 special = 0;
6696
6697 /* Auto-increment addresses must be reloaded in a special way. */
93c7b06e 6698 if (reload_out[j] && ! reload_out_reg[j])
08a9dd06 6699 {
 6700 /* We are not going to bother supporting the case where an
6701 incremented register can't be copied directly from
6702 OLDEQUIV since this seems highly unlikely. */
46f00337 6703 if (reload_secondary_in_reload[j] >= 0)
08a9dd06 6704 abort ();
93c7b06e 6705
6706 if (reload_inherited[j])
6707 oldequiv = reloadreg;
6708
6709 old = XEXP (reload_in_reg[j], 0);
6710
6711 if (optimize && GET_CODE (oldequiv) == REG
6712 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6713 && spill_reg_store[REGNO (oldequiv)]
6714 && GET_CODE (old) == REG
6715 && (dead_or_set_p (insn,
6716 spill_reg_stored_to[REGNO (oldequiv)])
6717 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6718 old)))
6719 delete_output_reload (insn, j, REGNO (oldequiv));
6720
08a9dd06 6721 /* Prevent normal processing of this reload. */
6722 special = 1;
6723 /* Output a special code sequence for this case. */
93c7b06e 6724 new_spill_reg_store[REGNO (reloadreg)]
6725 = inc_for_reload (reloadreg, oldequiv, reload_out[j],
6726 reload_inc[j]);
08a9dd06 6727 }
6728
6729 /* If we are reloading a pseudo-register that was set by the previous
6730 insn, see if we can get rid of that pseudo-register entirely
6731 by redirecting the previous insn into our reload register. */
6732
6733 else if (optimize && GET_CODE (old) == REG
6734 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6735 && dead_or_set_p (insn, old)
6736 /* This is unsafe if some other reload
6737 uses the same reg first. */
c6bdbe11 6738 && reload_reg_free_for_value_p (REGNO (reloadreg),
6739 reload_opnum[j],
6740 reload_when_needed[j],
6741 old, reload_out[j],
6742 j, 0))
08a9dd06 6743 {
6744 rtx temp = PREV_INSN (insn);
6745 while (temp && GET_CODE (temp) == NOTE)
6746 temp = PREV_INSN (temp);
6747 if (temp
6748 && GET_CODE (temp) == INSN
6749 && GET_CODE (PATTERN (temp)) == SET
6750 && SET_DEST (PATTERN (temp)) == old
6751 /* Make sure we can access insn_operand_constraint. */
6752 && asm_noperands (PATTERN (temp)) < 0
6753 /* This is unsafe if prev insn rejects our reload reg. */
6754 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6755 reloadreg)
6756 /* This is unsafe if operand occurs more than once in current
6757 insn. Perhaps some occurrences aren't reloaded. */
6758 && count_occurrences (PATTERN (insn), old) == 1
6759 /* Don't risk splitting a matching pair of operands. */
6760 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6761 {
6762 /* Store into the reload register instead of the pseudo. */
6763 SET_DEST (PATTERN (temp)) = reloadreg;
68fe1709 6764
6765 /* If the previous insn is an output reload, the source is
6766 a reload register, and its spill_reg_store entry will
6767 contain the previous destination. This is now
6768 invalid. */
6769 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6770 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6771 {
6772 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6773 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6774 }
6775
08a9dd06 6776 /* If these are the only uses of the pseudo reg,
6777 pretend for GDB it lives in the reload reg we used. */
394685a4 6778 if (REG_N_DEATHS (REGNO (old)) == 1
6779 && REG_N_SETS (REGNO (old)) == 1)
08a9dd06 6780 {
6781 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6782 alter_reg (REGNO (old), -1);
6783 }
6784 special = 1;
6785 }
6786 }
6787
81d0fbb3 6788 /* We can't do that, so output an insn to load RELOADREG. */
6789
08a9dd06 6790 if (! special)
6791 {
6792#ifdef SECONDARY_INPUT_RELOAD_CLASS
6793 rtx second_reload_reg = 0;
6794 enum insn_code icode;
6795
6796 /* If we have a secondary reload, pick up the secondary register
497de2d4 6797 and icode, if any. If OLDEQUIV and OLD are different or
6798 if this is an in-out reload, recompute whether or not we
6799 still need a secondary register and what the icode should
6800 be. If we still need a secondary register and the class or
6801 icode is different, go back to reloading from OLD if using
6802 OLDEQUIV means that we got the wrong type of register. We
6803 cannot have different class or icode due to an in-out reload
6804 because we don't make such reloads when both the input and
6805 output need secondary reload registers. */
08a9dd06 6806
46f00337 6807 if (reload_secondary_in_reload[j] >= 0)
08a9dd06 6808 {
46f00337 6809 int secondary_reload = reload_secondary_in_reload[j];
3a875c6b 6810 rtx real_oldequiv = oldequiv;
6811 rtx real_old = old;
6812
6813 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6814 and similarly for OLD.
46f00337 6815 See comments in get_secondary_reload in reload.c. */
93c7b06e 6816 /* If it is a pseudo that cannot be replaced with its
6817 equivalent MEM, we must fall back to reload_in, which
6818 will have all the necessary substitutions registered. */
6819
3a875c6b 6820 if (GET_CODE (oldequiv) == REG
6821 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
93c7b06e 6822 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6823 {
6824 if (reg_equiv_address[REGNO (oldequiv)]
6825 || num_not_at_initial_offset)
6826 real_oldequiv = reload_in[j];
6827 else
6828 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6829 }
3a875c6b 6830
6831 if (GET_CODE (old) == REG
6832 && REGNO (old) >= FIRST_PSEUDO_REGISTER
93c7b06e 6833 && reg_equiv_memory_loc[REGNO (old)] != 0)
6834 {
6835 if (reg_equiv_address[REGNO (old)]
6836 || num_not_at_initial_offset)
6837 real_old = reload_in[j];
6838 else
6839 real_old = reg_equiv_mem[REGNO (old)];
6840 }
3a875c6b 6841
08a9dd06 6842 second_reload_reg = reload_reg_rtx[secondary_reload];
46f00337 6843 icode = reload_secondary_in_icode[j];
08a9dd06 6844
497de2d4 6845 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6846 || (reload_in[j] != 0 && reload_out[j] != 0))
08a9dd06 6847 {
6848 enum reg_class new_class
6849 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
3a875c6b 6850 mode, real_oldequiv);
08a9dd06 6851
6852 if (new_class == NO_REGS)
6853 second_reload_reg = 0;
6854 else
6855 {
6856 enum insn_code new_icode;
6857 enum machine_mode new_mode;
6858
6859 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6860 REGNO (second_reload_reg)))
3a875c6b 6861 oldequiv = old, real_oldequiv = real_old;
08a9dd06 6862 else
6863 {
6864 new_icode = reload_in_optab[(int) mode];
6865 if (new_icode != CODE_FOR_nothing
6866 && ((insn_operand_predicate[(int) new_icode][0]
c8ad158d 6867 && ! ((*insn_operand_predicate[(int) new_icode][0])
08a9dd06 6868 (reloadreg, mode)))
c8ad158d 6869 || (insn_operand_predicate[(int) new_icode][1]
6870 && ! ((*insn_operand_predicate[(int) new_icode][1])
3a875c6b 6871 (real_oldequiv, mode)))))
08a9dd06 6872 new_icode = CODE_FOR_nothing;
6873
6874 if (new_icode == CODE_FOR_nothing)
6875 new_mode = mode;
6876 else
2cf190fe 6877 new_mode = insn_operand_mode[(int) new_icode][2];
08a9dd06 6878
6879 if (GET_MODE (second_reload_reg) != new_mode)
6880 {
6881 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6882 new_mode))
3a875c6b 6883 oldequiv = old, real_oldequiv = real_old;
08a9dd06 6884 else
6885 second_reload_reg
941522d6 6886 = gen_rtx_REG (new_mode,
6887 REGNO (second_reload_reg));
08a9dd06 6888 }
6889 }
6890 }
6891 }
6892
6893 /* If we still need a secondary reload register, check
6894 to see if it is being used as a scratch or intermediate
3a875c6b 6895 register and generate code appropriately. If we need
6896 a scratch register, use REAL_OLDEQUIV since the form of
6897 the insn may depend on the actual address if it is
6898 a MEM. */
08a9dd06 6899
6900 if (second_reload_reg)
6901 {
6902 if (icode != CODE_FOR_nothing)
6903 {
b6fdd226 6904 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6905 second_reload_reg));
08a9dd06 6906 special = 1;
6907 }
6908 else
6909 {
6910 /* See if we need a scratch register to load the
6911 intermediate register (a tertiary reload). */
6912 enum insn_code tertiary_icode
46f00337 6913 = reload_secondary_in_icode[secondary_reload];
08a9dd06 6914
6915 if (tertiary_icode != CODE_FOR_nothing)
6916 {
6917 rtx third_reload_reg
46f00337 6918 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
08a9dd06 6919
81d0fbb3 6920 emit_insn ((GEN_FCN (tertiary_icode)
6921 (second_reload_reg, real_oldequiv,
6922 third_reload_reg)));
08a9dd06 6923 }
6924 else
93c7b06e 6925 gen_reload (second_reload_reg, real_oldequiv,
b6fdd226 6926 reload_opnum[j],
6927 reload_when_needed[j]);
81d0fbb3 6928
6929 oldequiv = second_reload_reg;
08a9dd06 6930 }
6931 }
6932 }
6933#endif
6934
fb96d361 6935 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
93c7b06e 6936 {
6937 rtx real_oldequiv = oldequiv;
6938
6939 if ((GET_CODE (oldequiv) == REG
6940 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6941 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6942 || (GET_CODE (oldequiv) == SUBREG
6943 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6944 && (REGNO (SUBREG_REG (oldequiv))
6945 >= FIRST_PSEUDO_REGISTER)
6946 && (reg_equiv_memory_loc
6947 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6948 real_oldequiv = reload_in[j];
6949 gen_reload (reloadreg, real_oldequiv, reload_opnum[j],
6950 reload_when_needed[j]);
6951 }
08a9dd06 6952
08a9dd06 6953 }
6954
83bfaafc 6955 this_reload_insn = get_last_insn ();
81d0fbb3 6956 /* End this sequence. */
6957 *where = get_insns ();
6958 end_sequence ();
93c7b06e 6959
6960 /* Update reload_override_in so that delete_address_reloads_1
6961 can see the actual register usage. */
6962 if (oldequiv_reg)
6963 reload_override_in[j] = oldequiv;
08a9dd06 6964 }
6965
6aed5e78 6966 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6967 e.g. inheriting a SImode output reload for
6968 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6969 if (optimize && reload_inherited[j] && reload_in[j]
6970 && GET_CODE (reload_in[j]) == MEM
93c7b06e 6971 && GET_CODE (reload_in_reg[j]) == MEM
6aed5e78 6972 && reload_spill_index[j] >= 0
6973 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6974 {
6975 expect_occurrences
6976 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6977 reload_in[j]
6978 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6979 }
08a9dd06 6980
6981 /* If we are reloading a register that was recently stored in with an
6982 output-reload, see if we can prove there was
6983 actually no need to store the old value in it. */
6984
93c7b06e 6985 if (optimize
6986 && (reload_inherited[j] || reload_override_in[j])
6987 && reload_reg_rtx[j]
6988 && GET_CODE (reload_reg_rtx[j]) == REG
6989 && spill_reg_store[REGNO (reload_reg_rtx[j])] != 0
08a9dd06 6990#if 0
6991 /* There doesn't seem to be any reason to restrict this to pseudos
6992 and doing so loses in the case where we are copying from a
6993 register of the wrong class. */
93c7b06e 6994 && REGNO (spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
6995 >= FIRST_PSEUDO_REGISTER
08a9dd06 6996#endif
93c7b06e 6997 /* The insn might have already some references to stackslots
6998 replaced by MEMs, while reload_out_reg still names the
6999 original pseudo. */
7000 && (dead_or_set_p (insn,
7001 spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
7002 || rtx_equal_p (spill_reg_stored_to[REGNO (reload_reg_rtx[j])],
7003 reload_out_reg[j])))
7004 delete_output_reload (insn, j, REGNO (reload_reg_rtx[j]));
08a9dd06 7005
7006 /* Input-reloading is done. Now do output-reloading,
7007 storing the value from the reload-register after the main insn
7008 if reload_out[j] is nonzero.
7009
7010 ??? At some point we need to support handling output reloads of
7011 JUMP_INSNs or insns that set cc0. */
93c7b06e 7012
7013 /* If this is an output reload that stores something that is
7014 not loaded in this same reload, see if we can eliminate a previous
7015 store. */
7016 {
7017 rtx pseudo = reload_out_reg[j];
7018
7019 if (pseudo
7020 && GET_CODE (pseudo) == REG
7021 && ! rtx_equal_p (reload_in_reg[j], pseudo)
7022 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7023 && reg_last_reload_reg[REGNO (pseudo)])
7024 {
7025 int pseudo_no = REGNO (pseudo);
7026 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7027
7028 /* We don't need to test full validity of last_regno for
7029 inherit here; we only want to know if the store actually
7030 matches the pseudo. */
7031 if (reg_reloaded_contents[last_regno] == pseudo_no
7032 && spill_reg_store[last_regno]
7033 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7034 delete_output_reload (insn, j, last_regno);
7035 }
7036 }
7037
7038 old = reload_out_reg[j];
08a9dd06 7039 if (old != 0
7040 && reload_reg_rtx[j] != old
7041 && reload_reg_rtx[j] != 0)
7042 {
7043 register rtx reloadreg = reload_reg_rtx[j];
7bd36830 7044#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
08a9dd06 7045 register rtx second_reloadreg = 0;
7bd36830 7046#endif
08a9dd06 7047 rtx note, p;
7048 enum machine_mode mode;
7049 int special = 0;
7050
7051 /* An output operand that dies right away does need a reload,
7052 but need not be copied from it. Show the new location in the
7053 REG_UNUSED note. */
7054 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7055 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7056 {
7057 XEXP (note, 0) = reload_reg_rtx[j];
7058 continue;
7059 }
fa9351ee 7060 /* Likewise for a SUBREG of an operand that dies. */
7061 else if (GET_CODE (old) == SUBREG
7062 && GET_CODE (SUBREG_REG (old)) == REG
7063 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7064 SUBREG_REG (old))))
7065 {
7066 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7067 reload_reg_rtx[j]);
7068 continue;
7069 }
08a9dd06 7070 else if (GET_CODE (old) == SCRATCH)
7071 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7072 but we don't want to make an output reload. */
7073 continue;
7074
7075#if 0
7076 /* Strip off of OLD any size-increasing SUBREGs such as
7077 (SUBREG:SI foo:QI 0). */
7078
7079 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7080 && (GET_MODE_SIZE (GET_MODE (old))
7081 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7082 old = SUBREG_REG (old);
7083#endif
7084
 7085 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7086 if (GET_CODE (insn) == JUMP_INSN)
7087 abort ();
7088
a43b19e2 7089 if (reload_when_needed[j] == RELOAD_OTHER)
79e3bc8a 7090 start_sequence ();
a43b19e2 7091 else
7092 push_to_sequence (output_reload_insns[reload_opnum[j]]);
81d0fbb3 7093
93c7b06e 7094 old = reload_out[j];
7095
08a9dd06 7096 /* Determine the mode to reload in.
7097 See comments above (for input reloading). */
7098
7099 mode = GET_MODE (old);
7100 if (mode == VOIDmode)
d473859b 7101 {
7102 /* VOIDmode should never happen for an output. */
7103 if (asm_noperands (PATTERN (insn)) < 0)
7104 /* It's the compiler's fault. */
ed6a2d98 7105 fatal_insn ("VOIDmode on an output", insn);
d473859b 7106 error_for_asm (insn, "output operand is constant in `asm'");
7107 /* Prevent crash--use something we know is valid. */
7108 mode = word_mode;
941522d6 7109 old = gen_rtx_REG (mode, REGNO (reloadreg));
d473859b 7110 }
08a9dd06 7111
08a9dd06 7112 if (GET_MODE (reloadreg) != mode)
941522d6 7113 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
08a9dd06 7114
7115#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7116
7117 /* If we need two reload regs, set RELOADREG to the intermediate
b6fdd226 7118 one, since it will be stored into OLD. We might need a secondary
08a9dd06 7119 register only for an input reload, so check again here. */
7120
46f00337 7121 if (reload_secondary_out_reload[j] >= 0)
08a9dd06 7122 {
3a875c6b 7123 rtx real_old = old;
08a9dd06 7124
3a875c6b 7125 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7126 && reg_equiv_mem[REGNO (old)] != 0)
7127 real_old = reg_equiv_mem[REGNO (old)];
08a9dd06 7128
3a875c6b 7129 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
7130 mode, real_old)
7131 != NO_REGS))
7132 {
7133 second_reloadreg = reloadreg;
46f00337 7134 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
08a9dd06 7135
3a875c6b 7136 /* See if RELOADREG is to be used as a scratch register
7137 or as an intermediate register. */
46f00337 7138 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
08a9dd06 7139 {
46f00337 7140 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
81d0fbb3 7141 (real_old, second_reloadreg, reloadreg)));
3a875c6b 7142 special = 1;
08a9dd06 7143 }
7144 else
3a875c6b 7145 {
7146 /* See if we need both a scratch and intermediate reload
7147 register. */
b6fdd226 7148
46f00337 7149 int secondary_reload = reload_secondary_out_reload[j];
3a875c6b 7150 enum insn_code tertiary_icode
46f00337 7151 = reload_secondary_out_icode[secondary_reload];
08a9dd06 7152
3a875c6b 7153 if (GET_MODE (reloadreg) != mode)
941522d6 7154 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
3a875c6b 7155
7156 if (tertiary_icode != CODE_FOR_nothing)
7157 {
7158 rtx third_reloadreg
46f00337 7159 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
fa9351ee 7160 rtx tem;
b6fdd226 7161
 7162 /* Copy primary reload reg to secondary reload reg
 7163 (note that these have been swapped above), then
 7164 secondary reload reg to OLD using our insn. */
7165
fa9351ee 7166 /* If REAL_OLD is a paradoxical SUBREG, remove it
7167 and try to put the opposite SUBREG on
7168 RELOADREG. */
7169 if (GET_CODE (real_old) == SUBREG
7170 && (GET_MODE_SIZE (GET_MODE (real_old))
7171 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7172 && 0 != (tem = gen_lowpart_common
7173 (GET_MODE (SUBREG_REG (real_old)),
7174 reloadreg)))
7175 real_old = SUBREG_REG (real_old), reloadreg = tem;
7176
b6fdd226 7177 gen_reload (reloadreg, second_reloadreg,
7178 reload_opnum[j], reload_when_needed[j]);
7179 emit_insn ((GEN_FCN (tertiary_icode)
7180 (real_old, reloadreg, third_reloadreg)));
7181 special = 1;
20803561 7182 }
b6fdd226 7183
3a875c6b 7184 else
b6fdd226 7185 /* Copy between the reload regs here and then to
7186 OUT later. */
3a875c6b 7187
b6fdd226 7188 gen_reload (reloadreg, second_reloadreg,
7189 reload_opnum[j], reload_when_needed[j]);
3a875c6b 7190 }
08a9dd06 7191 }
7192 }
7193#endif
7194
7195 /* Output the last reload insn. */
7196 if (! special)
323c781a 7197 {
7198 rtx set;
7199
7200 /* Don't output the last reload if OLD is not the dest of
7201 INSN and is in the src and is clobbered by INSN. */
7202 if (! flag_expensive_optimizations
7203 || GET_CODE (old) != REG
7204 || !(set = single_set (insn))
7205 || rtx_equal_p (old, SET_DEST (set))
7206 || !reg_mentioned_p (old, SET_SRC (set))
7207 || !regno_clobbered_p (REGNO (old), insn))
7208 gen_reload (old, reloadreg, reload_opnum[j],
7209 reload_when_needed[j]);
7210 }
08a9dd06 7211
08a9dd06 7212 /* Look at all insns we emitted, just to be safe. */
81d0fbb3 7213 for (p = get_insns (); p; p = NEXT_INSN (p))
08a9dd06 7214 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7215 {
a5e95c30 7216 rtx pat = PATTERN (p);
7217
08a9dd06 7218 /* If this output reload doesn't come from a spill reg,
7219 clear any memory of reloaded copies of the pseudo reg.
7220 If this output reload comes from a spill reg,
7221 reg_has_output_reload will make this do nothing. */
a5e95c30 7222 note_stores (pat, forget_old_reloads_1);
7223
7224 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7225 {
93c7b06e 7226 rtx set = single_set (insn);
a5e95c30 7227 if (reload_spill_index[j] < 0
93c7b06e 7228 && set
7229 && SET_SRC (set) == reload_reg_rtx[j])
a5e95c30 7230 {
93c7b06e 7231 int src = REGNO (SET_SRC (set));
08a9dd06 7232
a5e95c30 7233 reload_spill_index[j] = src;
7234 SET_HARD_REG_BIT (reg_is_output_reload, src);
7235 if (find_regno_note (insn, REG_DEAD, src))
7236 SET_HARD_REG_BIT (reg_reloaded_died, src);
7237 }
93c7b06e 7238 if (REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
9d06145e 7239 {
7240 int s = reload_secondary_out_reload[j];
93c7b06e 7241 set = single_set (p);
9d06145e 7242 /* If this reload copies only to the secondary reload
7243 register, the secondary reload does the actual
7244 store. */
7245 if (s >= 0 && set == NULL_RTX)
7246 ; /* We can't tell what function the secondary reload
7247 has and where the actual store to the pseudo is
7248 made; leave new_spill_reg_store alone. */
7249 else if (s >= 0
7250 && SET_SRC (set) == reload_reg_rtx[j]
7251 && SET_DEST (set) == reload_reg_rtx[s])
7252 {
7253 /* Usually the next instruction will be the
7254 secondary reload insn; if we can confirm
7255 that it is, setting new_spill_reg_store to
7256 that insn will allow an extra optimization. */
7257 rtx s_reg = reload_reg_rtx[s];
7258 rtx next = NEXT_INSN (p);
7259 reload_out[s] = reload_out[j];
93c7b06e 7260 reload_out_reg[s] = reload_out_reg[j];
9d06145e 7261 set = single_set (next);
7262 if (set && SET_SRC (set) == s_reg
7263 && ! new_spill_reg_store[REGNO (s_reg)])
93c7b06e 7264 {
7265 SET_HARD_REG_BIT (reg_is_output_reload,
7266 REGNO (s_reg));
7267 new_spill_reg_store[REGNO (s_reg)] = next;
7268 }
9d06145e 7269 }
7270 else
93c7b06e 7271 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = p;
9d06145e 7272 }
a5e95c30 7273 }
08a9dd06 7274 }
7275
a43b19e2 7276 if (reload_when_needed[j] == RELOAD_OTHER)
1dd0083e 7277 {
7278 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7279 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7280 }
7281 else
7282 output_reload_insns[reload_opnum[j]] = get_insns ();
a43b19e2 7283
81d0fbb3 7284 end_sequence ();
08a9dd06 7285 }
08a9dd06 7286 }
7287
81d0fbb3 7288 /* Now write all the insns we made for reloads in the order expected by
7289 the allocation functions. Prior to the insn being reloaded, we write
7290 the following reloads:
7291
7292 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7293
8fa27ffb 7294 RELOAD_OTHER reloads.
81d0fbb3 7295
9e53d777 7296 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7297 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7298 RELOAD_FOR_INPUT reload for the operand.
81d0fbb3 7299
58866c2c 7300 RELOAD_FOR_OPADDR_ADDR reloads.
7301
81d0fbb3 7302 RELOAD_FOR_OPERAND_ADDRESS reloads.
7303
7304 After the insn being reloaded, we write the following:
7305
9e53d777 7306 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7307 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7308 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7309 reloads for the operand. The RELOAD_OTHER output reloads are
7310 output in descending order by reload number. */
81d0fbb3 7311
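  /* All of the input-side sequences are emitted immediately before INSN,
     while the output-side sequences are emitted before FOLLOWING_INSN,
     i.e. immediately after INSN.  */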
2a588794 7312 emit_insns_before (other_input_address_reload_insns, insn);
7313 emit_insns_before (other_input_reload_insns, insn);
81d0fbb3 7314
7315 for (j = 0; j < reload_n_operands; j++)
7316 {
2a588794 7317 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7318 emit_insns_before (input_address_reload_insns[j], insn);
7319 emit_insns_before (input_reload_insns[j], insn);
81d0fbb3 7320 }
7321
2a588794 7322 emit_insns_before (other_operand_reload_insns, insn);
7323 emit_insns_before (operand_reload_insns, insn);
81d0fbb3 7324
7325 for (j = 0; j < reload_n_operands; j++)
7326 {
9e53d777 7327 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
81d0fbb3 7328 emit_insns_before (output_address_reload_insns[j], following_insn);
7329 emit_insns_before (output_reload_insns[j], following_insn);
1dd0083e 7330 emit_insns_before (other_output_reload_insns[j], following_insn);
2a588794 7331 }
7332
7333 /* Keep basic block info up to date. */
7334 if (n_basic_blocks)
7335 {
68676d00 7336 if (BLOCK_HEAD (chain->block) == insn)
7337 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
7338 if (BLOCK_END (chain->block) == insn)
7339 BLOCK_END (chain->block) = PREV_INSN (following_insn);
81d0fbb3 7340 }
7341
08a9dd06 7342 /* For all the spill regs newly reloaded in this instruction,
7343 record what they were reloaded from, so subsequent instructions
497de2d4 7344 can inherit the reloads.
7345
7346 Update spill_reg_store for the reloads of this insn.
d9999eba 7347 Copy the elements that were updated in the loop above. */
08a9dd06 7348
7349 for (j = 0; j < n_reloads; j++)
7350 {
7351 register int r = reload_order[j];
7352 register int i = reload_spill_index[r];
7353
324b84e2 7354 /* If this is a non-inherited input reload from a pseudo, we must
7355 clear any memory of a previous store to the same pseudo. Only do
7356 something if there will not be an output reload for the pseudo
7357 being reloaded. */
7358 if (reload_in_reg[r] != 0
7359 && ! (reload_inherited[r] || reload_override_in[r]))
7360 {
7361 rtx reg = reload_in_reg[r];
7362
7363 if (GET_CODE (reg) == SUBREG)
7364 reg = SUBREG_REG (reg);
7365
7366 if (GET_CODE (reg) == REG
7367 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7368 && ! reg_has_output_reload[REGNO (reg)])
7369 {
7370 int nregno = REGNO (reg);
7371
7372 if (reg_last_reload_reg[nregno])
7373 {
7374 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7375
7376 if (reg_reloaded_contents[last_regno] == nregno)
7377 spill_reg_store[last_regno] = 0;
7378 }
7379 }
7380 }
7381
a5e95c30 7382 /* I is nonneg if this reload used a register.
08a9dd06 7383 If reload_reg_rtx[r] is 0, this is an optional reload
1d23c719 7384 that we opted to ignore. */
497de2d4 7385
1d23c719 7386 if (i >= 0 && reload_reg_rtx[r] != 0)
08a9dd06 7387 {
08a9dd06 7388 int nr
a5e95c30 7389 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
08a9dd06 7390 int k;
1d23c719 7391 int part_reaches_end = 0;
7392 int all_reaches_end = 1;
08a9dd06 7393
1d23c719 7394 /* For a multi-register reload, we need to check whether all or part
7395 of the value lives to the end. */
08a9dd06 7396 for (k = 0; k < nr; k++)
7397 {
a5e95c30 7398 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
1d23c719 7399 reload_when_needed[r]))
7400 part_reaches_end = 1;
7401 else
7402 all_reaches_end = 0;
08a9dd06 7403 }
7404
1d23c719 7405 /* Ignore reloads that don't reach the end of the insn in
 7406 their entirety. */
7407 if (all_reaches_end)
08a9dd06 7408 {
1d23c719 7409 /* First, clear out memory of what used to be in this spill reg.
7410 If consecutive registers are used, clear them all. */
a6f0d869 7411
08a9dd06 7412 for (k = 0; k < nr; k++)
a5e95c30 7413 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
a6f0d869 7414
1d23c719 7415 /* Maybe the spill reg contains a copy of reload_out. */
93c7b06e 7416 if (reload_out[r] != 0
7417 && (GET_CODE (reload_out[r]) == REG
7418#ifdef AUTO_INC_DEC
7419 || ! reload_out_reg[r]
7420#endif
7421 || GET_CODE (reload_out_reg[r]) == REG))
1d23c719 7422 {
93c7b06e 7423 rtx out = (GET_CODE (reload_out[r]) == REG
7424 ? reload_out[r]
7425 : reload_out_reg[r]
7426 ? reload_out_reg[r]
7427/* AUTO_INC */ : XEXP (reload_in_reg[r], 0));
7428 register int nregno = REGNO (out);
1d23c719 7429 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7430 : HARD_REGNO_NREGS (nregno,
7431 GET_MODE (reload_reg_rtx[r])));
7432
7433 spill_reg_store[i] = new_spill_reg_store[i];
93c7b06e 7434 spill_reg_stored_to[i] = out;
1d23c719 7435 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7436
7437 /* If NREGNO is a hard register, it may occupy more than
7438 one register. If it does, say what is in the
7439 rest of the registers assuming that both registers
7440 agree on how many words the object takes. If not,
7441 invalidate the subsequent registers. */
7442
7443 if (nregno < FIRST_PSEUDO_REGISTER)
7444 for (k = 1; k < nnr; k++)
7445 reg_last_reload_reg[nregno + k]
7446 = (nr == nnr
941522d6 7447 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7448 REGNO (reload_reg_rtx[r]) + k)
1d23c719 7449 : 0);
7450
7451 /* Now do the inverse operation. */
7452 for (k = 0; k < nr; k++)
7453 {
a5e95c30 7454 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7455 reg_reloaded_contents[i + k]
1d23c719 7456 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7457 ? nregno
7458 : nregno + k);
a5e95c30 7459 reg_reloaded_insn[i + k] = insn;
7460 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
1d23c719 7461 }
7462 }
a6f0d869 7463
1d23c719 7464 /* Maybe the spill reg contains a copy of reload_in. Only do
7465 something if there will not be an output reload for
7466 the register being reloaded. */
93c7b06e 7467 else if (reload_out_reg[r] == 0
1d23c719 7468 && reload_in[r] != 0
7469 && ((GET_CODE (reload_in[r]) == REG
93c7b06e 7470 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER
1d23c719 7471 && ! reg_has_output_reload[REGNO (reload_in[r])])
7472 || (GET_CODE (reload_in_reg[r]) == REG
93c7b06e 7473 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))
7474 && ! reg_set_p (reload_reg_rtx[r], PATTERN (insn)))
1d23c719 7475 {
7476 register int nregno;
7477 int nnr;
497de2d4 7478
93c7b06e 7479 if (GET_CODE (reload_in[r]) == REG
7480 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER)
1d23c719 7481 nregno = REGNO (reload_in[r]);
93c7b06e 7482 else if (GET_CODE (reload_in_reg[r]) == REG)
1d23c719 7483 nregno = REGNO (reload_in_reg[r]);
93c7b06e 7484 else
7485 nregno = REGNO (XEXP (reload_in_reg[r], 0));
a6f0d869 7486
1d23c719 7487 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7488 : HARD_REGNO_NREGS (nregno,
7489 GET_MODE (reload_reg_rtx[r])));
7490
7491 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7492
7493 if (nregno < FIRST_PSEUDO_REGISTER)
7494 for (k = 1; k < nnr; k++)
7495 reg_last_reload_reg[nregno + k]
7496 = (nr == nnr
941522d6 7497 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7498 REGNO (reload_reg_rtx[r]) + k)
1d23c719 7499 : 0);
7500
7501 /* Unless we inherited this reload, show we haven't
93c7b06e 7502 recently done a store.
7503 Previous stores of inherited auto_inc expressions
7504 also have to be discarded. */
7505 if (! reload_inherited[r]
7506 || (reload_out[r] && ! reload_out_reg[r]))
1d23c719 7507 spill_reg_store[i] = 0;
7508
7509 for (k = 0; k < nr; k++)
7510 {
a5e95c30 7511 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7512 reg_reloaded_contents[i + k]
1d23c719 7513 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7514 ? nregno
7515 : nregno + k);
a5e95c30 7516 reg_reloaded_insn[i + k] = insn;
7517 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
1d23c719 7518 }
7519 }
7520 }
497de2d4 7521
1d23c719 7522 /* However, if part of the reload reaches the end, then we must
7523 invalidate the old info for the part that survives to the end. */
7524 else if (part_reaches_end)
7525 {
81d0fbb3 7526 for (k = 0; k < nr; k++)
a5e95c30 7527 if (reload_reg_reaches_end_p (i + k,
1d23c719 7528 reload_opnum[r],
7529 reload_when_needed[r]))
a5e95c30 7530 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
08a9dd06 7531 }
7532 }
7533
7534 /* The following if-statement was #if 0'd in 1.34 (or before...).
7535 It's reenabled in 1.35 because supposedly nothing else
7536 deals with this problem. */
7537
7538 /* If a register gets output-reloaded from a non-spill register,
7539 that invalidates any previous reloaded copy of it.
7540 But forget_old_reloads_1 won't get to see it, because
7541 it thinks only about the original insn. So invalidate it here. */
93c7b06e 7542 if (i < 0 && reload_out[r] != 0
7543 && (GET_CODE (reload_out[r]) == REG
7544 || (GET_CODE (reload_out[r]) == MEM
7545 && GET_CODE (reload_out_reg[r]) == REG)))
08a9dd06 7546 {
93c7b06e 7547 rtx out = (GET_CODE (reload_out[r]) == REG
7548 ? reload_out[r] : reload_out_reg[r]);
7549 register int nregno = REGNO (out);
9a2d5fa0 7550 if (nregno >= FIRST_PSEUDO_REGISTER)
93c7b06e 7551 {
7552 rtx src_reg, store_insn;
7553
7554 reg_last_reload_reg[nregno] = 0;
7555
7556 /* If we can find a hard register that is stored, record
7557 the storing insn so that we may delete this insn with
7558 delete_output_reload. */
7559 src_reg = reload_reg_rtx[r];
7560
7561 /* If this is an optional reload, try to find the source reg
7562 from an input reload. */
7563 if (! src_reg)
7564 {
7565 rtx set = single_set (insn);
72e8da69 7566 if (set && SET_DEST (set) == reload_out[r])
93c7b06e 7567 {
7568 int k;
7569
7570 src_reg = SET_SRC (set);
7571 store_insn = insn;
7572 for (k = 0; k < n_reloads; k++)
7573 {
7574 if (reload_in[k] == src_reg)
7575 {
7576 src_reg = reload_reg_rtx[k];
7577 break;
7578 }
7579 }
7580 }
7581 }
7582 else
7583 store_insn = new_spill_reg_store[REGNO (src_reg)];
7584 if (src_reg && GET_CODE (src_reg) == REG
7585 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7586 {
7587 int src_regno = REGNO (src_reg);
7588 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]);
 7589 /* Where to find a death note varies with
 7590 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7591 necessarily checked exactly in the code that moves
7592 notes, so just check both locations. */
7593 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7594 if (! note)
7595 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7596 while (nr-- > 0)
7597 {
7598 spill_reg_store[src_regno + nr] = store_insn;
7599 spill_reg_stored_to[src_regno + nr] = out;
7600 reg_reloaded_contents[src_regno + nr] = nregno;
7601 reg_reloaded_insn[src_regno + nr] = store_insn;
207fccde 7602 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
93c7b06e 7603 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7604 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7605 if (note)
7606 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7607 else
7608 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7609 }
7610 reg_last_reload_reg[nregno] = src_reg;
7611 }
7612 }
9a2d5fa0 7613 else
7614 {
7615 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
f16fc94d 7616
9a2d5fa0 7617 while (num_regs-- > 0)
7618 reg_last_reload_reg[nregno + num_regs] = 0;
7619 }
08a9dd06 7620 }
7621 }
a5e95c30 7622 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
08a9dd06 7623}
7624\f
b6fdd226 7625/* Emit code to perform a reload from IN (which may be a reload register) to
7626 OUT (which may also be a reload register). IN or OUT is from operand
7627 OPNUM with reload type TYPE.
81d0fbb3 7628
bb552490 7629 Returns first insn emitted. */
08a9dd06 7630
7631rtx
b6fdd226 7632gen_reload (out, in, opnum, type)
7633 rtx out;
08a9dd06 7634 rtx in;
81d0fbb3 7635 int opnum;
7636 enum reload_type type;
08a9dd06 7637{
81d0fbb3 7638 rtx last = get_last_insn ();
9fc7ac1a 7639 rtx tem;
7640
7641 /* If IN is a paradoxical SUBREG, remove it and try to put the
7642 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7643 if (GET_CODE (in) == SUBREG
7644 && (GET_MODE_SIZE (GET_MODE (in))
7645 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7646 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7647 in = SUBREG_REG (in), out = tem;
7648 else if (GET_CODE (out) == SUBREG
7649 && (GET_MODE_SIZE (GET_MODE (out))
7650 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7651 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7652 out = SUBREG_REG (out), in = tem;
08a9dd06 7653
c8ad158d 7654 /* How to do this reload can get quite tricky. Normally, we are being
08a9dd06 7655 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7656 register that didn't get a hard register. In that case we can just
7657 call emit_move_insn.
7658
82f1ce58 7659 We can also be asked to reload a PLUS that adds a register or a MEM to
7660 another register, constant or MEM. This can occur during frame pointer
7661 elimination and while reloading addresses. This case is handled by
7662 trying to emit a single insn to perform the add. If it is not valid,
7663 we use a two insn sequence.
08a9dd06 7664
7665 Finally, we could be called to handle an 'o' constraint by putting
7666 an address into a register. In that case, we first try to do this
7667 with a named pattern of "reload_load_address". If no such pattern
7668 exists, we just emit a SET insn and hope for the best (it will normally
7669 be valid on machines that use 'o').
7670
 7671 This entire process is made complex because reload will never
 7672 process the insns we generate here, so we must ensure that they
 7673 will fit their constraints, and also because parts of IN might be
 7674 reloaded separately and replaced with spill registers.
7675 Because of this, we are, in some sense, just guessing the right approach
7676 here. The one listed above seems to work.
7677
7678 ??? At some point, this whole thing needs to be rethought. */
7679
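  /* For illustration (an assumed example, not tied to any particular
     target): after frame pointer elimination we might be asked to reload
	 IN  = (plus:SI (reg:SI sp) (const_int 40))
     into OUT = (reg:SI 1).  The code below first tries the single insn
	 (set (reg:SI 1) (plus:SI (reg:SI sp) (const_int 40)))
     and falls back to the move/add sequences further down only if that
     insn does not satisfy its constraints.  */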
7680 if (GET_CODE (in) == PLUS
82f1ce58 7681 && (GET_CODE (XEXP (in, 0)) == REG
e54018d3 7682 || GET_CODE (XEXP (in, 0)) == SUBREG
82f1ce58 7683 || GET_CODE (XEXP (in, 0)) == MEM)
7684 && (GET_CODE (XEXP (in, 1)) == REG
e54018d3 7685 || GET_CODE (XEXP (in, 1)) == SUBREG
82f1ce58 7686 || CONSTANT_P (XEXP (in, 1))
7687 || GET_CODE (XEXP (in, 1)) == MEM))
08a9dd06 7688 {
82f1ce58 7689 /* We need to compute the sum of a register or a MEM and another
7690 register, constant, or MEM, and put it into the reload
2147669e 7691 register. The best possible way of doing this is if the machine
7692 has a three-operand ADD insn that accepts the required operands.
08a9dd06 7693
7694 The simplest approach is to try to generate such an insn and see if it
7695 is recognized and matches its constraints. If so, it can be used.
7696
7697 It might be better not to actually emit the insn unless it is valid,
45b79593 7698 but we need to pass the insn as an operand to `recog' and
7f82be90 7699 `extract_insn' and it is simpler to emit and then delete the insn if
45b79593 7700 not valid than to dummy things up. */
c8ad158d 7701
8a7a0a5a 7702 rtx op0, op1, tem, insn;
08a9dd06 7703 int code;
c8ad158d 7704
8a7a0a5a 7705 op0 = find_replacement (&XEXP (in, 0));
7706 op1 = find_replacement (&XEXP (in, 1));
7707
08a9dd06 7708 /* Since constraint checking is strict, commutativity won't be
7709 checked, so we need to do that here to avoid spurious failure
7710 if the add instruction is two-address and the second operand
7711 of the add is the same as the reload reg, which is frequently
7712 the case. If the insn would be A = B + A, rearrange it so
a92771b8 7713 it will be A = A + B as constrain_operands expects. */
c8ad158d 7714
08a9dd06 7715 if (GET_CODE (XEXP (in, 1)) == REG
b6fdd226 7716 && REGNO (out) == REGNO (XEXP (in, 1)))
8a7a0a5a 7717 tem = op0, op0 = op1, op1 = tem;
7718
7719 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
941522d6 7720 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
08a9dd06 7721
941522d6 7722 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
08a9dd06 7723 code = recog_memoized (insn);
7724
7725 if (code >= 0)
7726 {
7f82be90 7727 extract_insn (insn);
08a9dd06 7728 /* We want constrain operands to treat this insn strictly in
7729 its validity determination, i.e., the way it would after reload
7730 has completed. */
7f82be90 7731 if (constrain_operands (1))
08a9dd06 7732 return insn;
7733 }
7734
81d0fbb3 7735 delete_insns_since (last);
08a9dd06 7736
 7737 /* If that failed, we must use a conservative two-insn sequence.
 7738 Use a move to copy the constant, MEM, or pseudo register to the
8a7a0a5a 7739 reload register, since "move" can handle an arbitrary operand,
 7740 unlike add, which in general cannot.  Then add the registers.
08a9dd06 7741
7742 If there is another way to do this for a specific machine, a
7743 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7744 we emit below. */
7745
e54018d3 7746 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
8a7a0a5a 7747 || (GET_CODE (op1) == REG
7748 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7749 tem = op0, op0 = op1, op1 = tem;
08a9dd06 7750
e54018d3 7751 gen_reload (out, op0, opnum, type);
9fd8c471 7752
b6fdd226 7753 /* If OP0 and OP1 are the same, we can use OUT for OP1.
9fd8c471 7754 This fixes a problem on the 32K where the stack pointer cannot
7755 be used as an operand of an add insn. */
7756
7757 if (rtx_equal_p (op0, op1))
b6fdd226 7758 op1 = out;
9fd8c471 7759
b6fdd226 7760 insn = emit_insn (gen_add2_insn (out, op1));
b2b06b7e 7761
7762 /* If that failed, copy the address register to the reload register.
a92771b8 7763 Then add the constant to the reload register. */
b2b06b7e 7764
7765 code = recog_memoized (insn);
7766
7767 if (code >= 0)
7768 {
7f82be90 7769 extract_insn (insn);
b2b06b7e 7770 /* We want constrain operands to treat this insn strictly in
7771 its validity determination, i.e., the way it would after reload
7772 has completed. */
7f82be90 7773 if (constrain_operands (1))
7f060acf 7774 {
7775 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7776 REG_NOTES (insn)
e02c6d1f 7777 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7f060acf 7778 return insn;
7779 }
b2b06b7e 7780 }
7781
7782 delete_insns_since (last);
7783
e54018d3 7784 gen_reload (out, op1, opnum, type);
7f060acf 7785 insn = emit_insn (gen_add2_insn (out, op0));
e02c6d1f 7786 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
08a9dd06 7787 }
7788
c538053c 7789#ifdef SECONDARY_MEMORY_NEEDED
7790 /* If we need a memory location to do the move, do it that way. */
7791 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
b6fdd226 7792 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
c538053c 7793 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
b6fdd226 7794 REGNO_REG_CLASS (REGNO (out)),
7795 GET_MODE (out)))
c538053c 7796 {
7797 /* Get the memory to use and rewrite both registers to its mode. */
b6fdd226 7798 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
c538053c 7799
b6fdd226 7800 if (GET_MODE (loc) != GET_MODE (out))
941522d6 7801 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
c538053c 7802
7803 if (GET_MODE (loc) != GET_MODE (in))
941522d6 7804 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
c538053c 7805
e54018d3 7806 gen_reload (loc, in, opnum, type);
7807 gen_reload (out, loc, opnum, type);
c538053c 7808 }
7809#endif
7810
08a9dd06 7811 /* If IN is a simple operand, use gen_move_insn. */
7812 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
b6fdd226 7813 emit_insn (gen_move_insn (out, in));
08a9dd06 7814
7815#ifdef HAVE_reload_load_address
7816 else if (HAVE_reload_load_address)
b6fdd226 7817 emit_insn (gen_reload_load_address (out, in));
08a9dd06 7818#endif
7819
b6fdd226 7820 /* Otherwise, just write (set OUT IN) and hope for the best. */
08a9dd06 7821 else
941522d6 7822 emit_insn (gen_rtx_SET (VOIDmode, out, in));
08a9dd06 7823
7824 /* Return the first insn emitted.
81d0fbb3 7825 We cannot just return get_last_insn, because there may have
08a9dd06 7826 been multiple instructions emitted.  Also note that gen_move_insn may
 7827 emit more than one insn itself, so we cannot assume that there is one
 7828 insn emitted per emit_insn_before call. */
7829
81d0fbb3 7830 return last ? NEXT_INSN (last) : get_insns ();
08a9dd06 7831}
7832\f
7833/* Delete a previously made output-reload
7834 whose result we now believe is not needed.
7835 First we double-check.
7836
7837 INSN is the insn now being processed.
93c7b06e 7838 LAST_RELOAD_REG is the hard register number for which we want to delete
7839 the last output reload.
7840 J is the reload-number that originally used REG. The caller has made
7841 certain that reload J doesn't use REG any longer for input. */
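
/* For illustration (an assumed example): if reload emitted

       output_reload_insn:  (set (reg 42) (reg 3))   ; pseudo 42 <- reload reg 3
       ...
       INSN:                the last use of pseudo 42, satisfied from (reg 3)

   and pseudo 42 is not referenced in between, then the value reaches INSN
   through the reload register alone and the store into the pseudo can be
   deleted.  */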
08a9dd06 7842
7843static void
93c7b06e 7844delete_output_reload (insn, j, last_reload_reg)
08a9dd06 7845 rtx insn;
7846 int j;
93c7b06e 7847 int last_reload_reg;
08a9dd06 7848{
93c7b06e 7849 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7850 rtx reg = spill_reg_stored_to[last_reload_reg];
7851 int k;
7852 int n_occurrences;
7853 int n_inherited = 0;
08a9dd06 7854 register rtx i1;
93c7b06e 7855 rtx substed;
7856
08a9dd06 7857 /* Get the raw pseudo-register referred to. */
7858
08a9dd06 7859 while (GET_CODE (reg) == SUBREG)
7860 reg = SUBREG_REG (reg);
93c7b06e 7861 substed = reg_equiv_memory_loc[REGNO (reg)];
7862
7863 /* This is unsafe if the operand occurs more often in the current
7864 insn than it is inherited. */
7865 for (k = n_reloads - 1; k >= 0; k--)
7866 {
7867 rtx reg2 = reload_in[k];
7868 if (! reg2)
7869 continue;
7870 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7871 reg2 = reload_in_reg[k];
7872#ifdef AUTO_INC_DEC
7873 if (reload_out[k] && ! reload_out_reg[k])
7874 reg2 = XEXP (reload_in_reg[k], 0);
7875#endif
7876 while (GET_CODE (reg2) == SUBREG)
7877 reg2 = SUBREG_REG (reg2);
7878 if (rtx_equal_p (reg2, reg))
a1e5a0e1 7879 {
7880 if (reload_inherited[k] || reload_override_in[k] || k == j)
7881 {
93c7b06e 7882 n_inherited++;
a1e5a0e1 7883 reg2 = reload_out_reg[k];
7884 if (! reg2)
7885 continue;
7886 while (GET_CODE (reg2) == SUBREG)
7887 reg2 = XEXP (reg2, 0);
7888 if (rtx_equal_p (reg2, reg))
7889 n_inherited++;
7890 }
7891 else
7892 return;
7893 }
93c7b06e 7894 }
7895 n_occurrences = count_occurrences (PATTERN (insn), reg);
7896 if (substed)
7897 n_occurrences += count_occurrences (PATTERN (insn), substed);
7898 if (n_occurrences > n_inherited)
7899 return;
08a9dd06 7900
7901 /* If the pseudo-reg we are reloading is no longer referenced
7902 anywhere between the store into it and here,
7903 and no jumps or labels intervene, then the value can get
7904 here through the reload reg alone.
7905 Otherwise, give up--return. */
7906 for (i1 = NEXT_INSN (output_reload_insn);
7907 i1 != insn; i1 = NEXT_INSN (i1))
7908 {
7909 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7910 return;
7911 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7912 && reg_mentioned_p (reg, PATTERN (i1)))
fb4ed7f4 7913 {
93c7b06e 7914 /* If this is a USE in front of INSN, we only have to check that
7915 there are no more references than accounted for by inheritance. */
7916 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
fb4ed7f4 7917 {
93c7b06e 7918 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
fb4ed7f4 7919 i1 = NEXT_INSN (i1);
7920 }
93c7b06e 7921 if (n_occurrences <= n_inherited && i1 == insn)
fb4ed7f4 7922 break;
7923 return;
7924 }
08a9dd06 7925 }
7926
fb4ed7f4 7927 /* The caller has already checked that REG dies or is set in INSN.
 7928 It has also checked that we are optimizing, and thus some inaccuracies
7929 in the debugging information are acceptable.
7930 So we could just delete output_reload_insn.
7931 But in some cases we can improve the debugging information without
7932 sacrificing optimization - maybe even improving the code:
7933 See if the pseudo reg has been completely replaced
08a9dd06 7934 with reload regs. If so, delete the store insn
7935 and forget we had a stack slot for the pseudo. */
fb4ed7f4 7936 if (reload_out[j] != reload_in[j]
7937 && REG_N_DEATHS (REGNO (reg)) == 1
7304b676 7938 && REG_N_SETS (REGNO (reg)) == 1
fb4ed7f4 7939 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7940 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
08a9dd06 7941 {
7942 rtx i2;
7943
7944 /* We know that it was used only between here
7945 and the beginning of the current basic block.
7946 (We also know that the last use before INSN was
7947 the output reload we are thinking of deleting, but never mind that.)
7948 Search that range; see if any ref remains. */
7949 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7950 {
497de2d4 7951 rtx set = single_set (i2);
7952
08a9dd06 7953 /* Uses which just store in the pseudo don't count,
7954 since if they are the only uses, they are dead. */
497de2d4 7955 if (set != 0 && SET_DEST (set) == reg)
08a9dd06 7956 continue;
7957 if (GET_CODE (i2) == CODE_LABEL
7958 || GET_CODE (i2) == JUMP_INSN)
7959 break;
7960 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7961 && reg_mentioned_p (reg, PATTERN (i2)))
fb4ed7f4 7962 {
7963 /* Some other ref remains; just delete the output reload we
7964 know to be dead. */
93c7b06e 7965 delete_address_reloads (output_reload_insn, insn);
7966 PUT_CODE (output_reload_insn, NOTE);
7967 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7968 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
fb4ed7f4 7969 return;
7970 }
08a9dd06 7971 }
7972
7973 /* Delete the now-dead stores into this pseudo. */
7974 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7975 {
497de2d4 7976 rtx set = single_set (i2);
7977
7978 if (set != 0 && SET_DEST (set) == reg)
2b3f56be 7979 {
93c7b06e 7980 delete_address_reloads (i2, insn);
2b3f56be 7981 /* This might be a basic block head,
7982 thus don't use delete_insn. */
7983 PUT_CODE (i2, NOTE);
7984 NOTE_SOURCE_FILE (i2) = 0;
7985 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7986 }
08a9dd06 7987 if (GET_CODE (i2) == CODE_LABEL
7988 || GET_CODE (i2) == JUMP_INSN)
7989 break;
7990 }
7991
7992 /* For the debugging info,
7993 say the pseudo lives in this reload reg. */
7994 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7995 alter_reg (REGNO (reg), -1);
7996 }
93c7b06e 7997 delete_address_reloads (output_reload_insn, insn);
7998 PUT_CODE (output_reload_insn, NOTE);
7999 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8000 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
8001
8002}
8003
8004/* We are going to delete DEAD_INSN. Recursively delete loads of
8005 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8006 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8007static void
8008delete_address_reloads (dead_insn, current_insn)
8009 rtx dead_insn, current_insn;
8010{
8011 rtx set = single_set (dead_insn);
8012 rtx set2, dst, prev, next;
8013 if (set)
8014 {
8015 rtx dst = SET_DEST (set);
8016 if (GET_CODE (dst) == MEM)
8017 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8018 }
8019 /* If we deleted the store from a reloaded post_{in,de}c expression,
8020 we can delete the matching adds. */
8021 prev = PREV_INSN (dead_insn);
8022 next = NEXT_INSN (dead_insn);
8023 if (! prev || ! next)
8024 return;
8025 set = single_set (next);
8026 set2 = single_set (prev);
8027 if (! set || ! set2
8028 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8029 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8030 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8031 return;
8032 dst = SET_DEST (set);
8033 if (! rtx_equal_p (dst, SET_DEST (set2))
8034 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8035 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8036 || (INTVAL (XEXP (SET_SRC (set), 1))
8037 != - INTVAL (XEXP (SET_SRC (set2), 1))))
8038 return;
8039 delete_insn (prev);
8040 delete_insn (next);
8041}
8042
8043/* Subfunction of delete_address_reloads: process registers found in X. */
8044static void
8045delete_address_reloads_1 (dead_insn, x, current_insn)
8046 rtx dead_insn, x, current_insn;
8047{
8048 rtx prev, set, dst, i2;
8049 int i, j;
8050 enum rtx_code code = GET_CODE (x);
8051
8052 if (code != REG)
8053 {
 8054 char *fmt = GET_RTX_FORMAT (code);
8055 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8056 {
8057 if (fmt[i] == 'e')
8058 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8059 else if (fmt[i] == 'E')
8060 {
 8061 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8062 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8063 current_insn);
8064 }
8065 }
8066 return;
8067 }
8068
8069 if (spill_reg_order[REGNO (x)] < 0)
8070 return;
fb4ed7f4 8071
93c7b06e 8072 /* Scan backwards for the insn that sets X.  It might be far back,
 8073 due to inheritance. */
8074 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8075 {
8076 code = GET_CODE (prev);
8077 if (code == CODE_LABEL || code == JUMP_INSN)
8078 return;
8079 if (GET_RTX_CLASS (code) != 'i')
8080 continue;
8081 if (reg_set_p (x, PATTERN (prev)))
8082 break;
8083 if (reg_referenced_p (x, PATTERN (prev)))
8084 return;
8085 }
8086 if (! prev || INSN_UID (prev) < reload_first_uid)
8087 return;
8088 /* Check that PREV only sets the reload register. */
8089 set = single_set (prev);
8090 if (! set)
8091 return;
8092 dst = SET_DEST (set);
8093 if (GET_CODE (dst) != REG
8094 || ! rtx_equal_p (dst, x))
8095 return;
8096 if (! reg_set_p (dst, PATTERN (dead_insn)))
8097 {
8098 /* Check if DST was used in a later insn -
8099 it might have been inherited. */
8100 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8101 {
8102 if (GET_CODE (i2) == CODE_LABEL)
8103 break;
8104 if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
8105 continue;
8106 if (reg_referenced_p (dst, PATTERN (i2)))
8107 {
8108 /* If there is a reference to the register in the current insn,
8109 it might be loaded in a non-inherited reload. If no other
8110 reload uses it, that means the register is set before
8111 referenced. */
8112 if (i2 == current_insn)
8113 {
8114 for (j = n_reloads - 1; j >= 0; j--)
8115 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8116 || reload_override_in[j] == dst)
8117 return;
8118 for (j = n_reloads - 1; j >= 0; j--)
8119 if (reload_in[j] && reload_reg_rtx[j] == dst)
8120 break;
8121 if (j >= 0)
8122 break;
8123 }
8124 return;
8125 }
8126 if (GET_CODE (i2) == JUMP_INSN)
8127 break;
8128 if (reg_set_p (dst, PATTERN (i2)))
8129 break;
8130 /* If DST is still live at CURRENT_INSN, check if it is used for
8131 any reload. */
8132 if (i2 == current_insn)
8133 {
8134 for (j = n_reloads - 1; j >= 0; j--)
8135 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8136 || reload_override_in[j] == dst)
8137 return;
8138 /* ??? We can't finish the loop here, because dst might be
8139 allocated to a pseudo in this block if no reload in this
 8140 block needs any of the classes containing DST - see
8141 spill_hard_reg. There is no easy way to tell this, so we
8142 have to scan till the end of the basic block. */
8143 }
8144 }
8145 }
8146 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8147 reg_reloaded_contents[REGNO (dst)] = -1;
8148 /* Can't use delete_insn here because PREV might be a basic block head. */
8149 PUT_CODE (prev, NOTE);
8150 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
8151 NOTE_SOURCE_FILE (prev) = 0;
08a9dd06 8152}
08a9dd06 8153\f
c8ad158d 8154/* Output reload-insns to reload VALUE into RELOADREG.
df5e872e 8155 VALUE is an autoincrement or autodecrement RTX whose operand
08a9dd06 8156 is a register or memory location;
8157 so reloading involves incrementing that location.
93c7b06e 8158 IN is either identical to VALUE, or some cheaper place to reload from.
08a9dd06 8159
8160 INC_AMOUNT is the number to increment or decrement by (always positive).
93c7b06e 8161 This cannot be deduced from VALUE.
08a9dd06 8162
93c7b06e 8163 Return the instruction that stores into RELOADREG. */
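
/* For illustration (an assumed example): for VALUE == IN ==
   (pre_inc:SI (reg:SI 2)) with INC_AMOUNT 4, we first try

       (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4)))
       (set (reg RELOADREG) (reg:SI 2))

   and, if the direct add does not satisfy its constraints, we instead
   build the sum in RELOADREG and store it back:

       (set (reg RELOADREG) (reg:SI 2))
       (set (reg RELOADREG) (plus (reg RELOADREG) (const_int 4)))
       (set (reg:SI 2) (reg RELOADREG))  */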
8164
8165static rtx
8166inc_for_reload (reloadreg, in, value, inc_amount)
08a9dd06 8167 rtx reloadreg;
93c7b06e 8168 rtx in, value;
08a9dd06 8169 int inc_amount;
08a9dd06 8170{
8171 /* REG or MEM to be copied and incremented. */
8172 rtx incloc = XEXP (value, 0);
8173 /* Nonzero if increment after copying. */
8174 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
81d0fbb3 8175 rtx last;
45b79593 8176 rtx inc;
8177 rtx add_insn;
8178 int code;
93c7b06e 8179 rtx store;
8180 rtx real_in = in == value ? XEXP (in, 0) : in;
08a9dd06 8181
8182 /* No hard register is equivalent to this register after
8183 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8184 we could inc/dec that register as well (maybe even using it for
8185 the source), but I'm not sure it's worth worrying about. */
8186 if (GET_CODE (incloc) == REG)
8187 reg_last_reload_reg[REGNO (incloc)] = 0;
8188
8189 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8190 inc_amount = - inc_amount;
8191
e5fdd564 8192 inc = GEN_INT (inc_amount);
45b79593 8193
8194 /* If this is post-increment, first copy the location to the reload reg. */
93c7b06e 8195 if (post && real_in != reloadreg)
8196 emit_insn (gen_move_insn (reloadreg, real_in));
45b79593 8197
93c7b06e 8198 if (in == value)
8199 {
8200 /* See if we can directly increment INCLOC. Use a method similar to
8201 that in gen_reload. */
45b79593 8202
93c7b06e 8203 last = get_last_insn ();
8204 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8205 gen_rtx_PLUS (GET_MODE (incloc),
8206 incloc, inc)));
45b79593 8207
93c7b06e 8208 code = recog_memoized (add_insn);
8209 if (code >= 0)
08a9dd06 8210 {
7f82be90 8211 extract_insn (add_insn);
8212 if (constrain_operands (1))
93c7b06e 8213 {
8214 /* If this is a pre-increment and we have incremented the value
8215 where it lives, copy the incremented value to RELOADREG to
8216 be used as an address. */
45b79593 8217
93c7b06e 8218 if (! post)
8219 emit_insn (gen_move_insn (reloadreg, incloc));
81d0fbb3 8220
93c7b06e 8221 return add_insn;
8222 }
08a9dd06 8223 }
93c7b06e 8224 delete_insns_since (last);
08a9dd06 8225 }
45b79593 8226
45b79593 8227 /* If couldn't do the increment directly, must increment in RELOADREG.
8228 The way we do this depends on whether this is pre- or post-increment.
8229 For pre-increment, copy INCLOC to the reload register, increment it
8230 there, then save back. */
8231
8232 if (! post)
8233 {
93c7b06e 8234 if (in != reloadreg)
8235 emit_insn (gen_move_insn (reloadreg, real_in));
81d0fbb3 8236 emit_insn (gen_add2_insn (reloadreg, inc));
93c7b06e 8237 store = emit_insn (gen_move_insn (incloc, reloadreg));
45b79593 8238 }
08a9dd06 8239 else
8240 {
45b79593 8241 /* Postincrement.
8242 Because this might be a jump insn or a compare, and because RELOADREG
8243 may not be available after the insn in an input reload, we must do
8244 the incrementation before the insn being reloaded for.
8245
93c7b06e 8246 We have already copied IN to RELOADREG. Increment the copy in
45b79593 8247 RELOADREG, save that back, then decrement RELOADREG so it has
8248 the original value. */
8249
81d0fbb3 8250 emit_insn (gen_add2_insn (reloadreg, inc));
93c7b06e 8251 store = emit_insn (gen_move_insn (incloc, reloadreg));
81d0fbb3 8252 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
08a9dd06 8253 }
45b79593 8254
93c7b06e 8255 return store;
08a9dd06 8256}
8257\f
8258/* Return 1 if we are certain that the constraint-string STRING allows
8259 the hard register REG. Return 0 if we can't be sure of this. */
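
/* For instance (an assumed illustration; the exact behavior depends on the
   target's REG_CLASS_FROM_LETTER): given a general hard register, "g,r"
   yields 1 because every alternative accepts the register, while "r,m"
   yields 0 because the second alternative might be the one chosen and it
   accepts only memory.  */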
8260
8261static int
8262constraint_accepts_reg_p (string, reg)
a8482e91 8263 const char *string;
08a9dd06 8264 rtx reg;
8265{
8266 int value = 0;
8267 int regno = true_regnum (reg);
8268 int c;
8269
8270 /* Initialize for first alternative. */
8271 value = 0;
8272 /* Check that each alternative contains `g' or `r'. */
8273 while (1)
8274 switch (c = *string++)
8275 {
8276 case 0:
8277 /* If an alternative lacks `g' or `r', we lose. */
8278 return value;
8279 case ',':
8280 /* If an alternative lacks `g' or `r', we lose. */
8281 if (value == 0)
8282 return 0;
8283 /* Initialize for next alternative. */
8284 value = 0;
8285 break;
8286 case 'g':
8287 case 'r':
8288 /* Any general reg wins for this alternative. */
8289 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8290 value = 1;
8291 break;
8292 default:
8293 /* Any reg in specified class wins for this alternative. */
8294 {
45b79593 8295 enum reg_class class = REG_CLASS_FROM_LETTER (c);
08a9dd06 8296
45b79593 8297 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
08a9dd06 8298 value = 1;
8299 }
8300 }
8301}
8302\f
497de2d4 8303/* Return the number of places FIND appears within X, but don't count
8304 an occurrence if some SET_DEST is FIND. */
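
/* For instance (an assumed example): counting the occurrences of a REG rtx
   R in (set R (plus R (reg 4))) yields 1, because the SET_DEST position is
   skipped and only the use inside SET_SRC is counted.  */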
08a9dd06 8305
57fc958f 8306int
08a9dd06 8307count_occurrences (x, find)
8308 register rtx x, find;
8309{
8310 register int i, j;
8311 register enum rtx_code code;
8312 register char *format_ptr;
8313 int count;
8314
8315 if (x == find)
8316 return 1;
8317 if (x == 0)
8318 return 0;
8319
8320 code = GET_CODE (x);
8321
8322 switch (code)
8323 {
8324 case REG:
8325 case QUEUED:
8326 case CONST_INT:
8327 case CONST_DOUBLE:
8328 case SYMBOL_REF:
8329 case CODE_LABEL:
8330 case PC:
8331 case CC0:
8332 return 0;
497de2d4 8333
93c7b06e 8334 case MEM:
8335 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8336 return 1;
8337 break;
497de2d4 8338 case SET:
8339 if (SET_DEST (x) == find)
8340 return count_occurrences (SET_SRC (x), find);
8341 break;
0dbd1c74 8342
8343 default:
8344 break;
08a9dd06 8345 }
8346
8347 format_ptr = GET_RTX_FORMAT (code);
8348 count = 0;
8349
8350 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8351 {
8352 switch (*format_ptr++)
8353 {
8354 case 'e':
8355 count += count_occurrences (XEXP (x, i), find);
8356 break;
8357
8358 case 'E':
8359 if (XVEC (x, i) != NULL)
8360 {
8361 for (j = 0; j < XVECLEN (x, i); j++)
8362 count += count_occurrences (XVECEXP (x, i, j), find);
8363 }
8364 break;
8365 }
8366 }
8367 return count;
8368}
5e98e63d 8369\f
8370/* This array holds values which are equivalent to a hard register
8371 during reload_cse_regs. Each array element is an EXPR_LIST of
8372 values. Each time a hard register is set, we set the corresponding
8373 array element to the value. Each time a hard register is copied
8374 into memory, we add the memory location to the corresponding array
8375 element. We don't store values or memory addresses with side
8376 effects in this array.
8377
8378 If the value is a CONST_INT, then the mode of the containing
8379 EXPR_LIST is the mode in which that CONST_INT was referenced.
8380
8381 We sometimes clobber a specific entry in a list. In that case, we
8382 just set XEXP (list-entry, 0) to 0. */
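
/* For illustration (an assumed example): after processing

       (set (reg:SI 1) (const_int 10))
       (set (mem:SI (reg:SI 6)) (reg:SI 1))

   reg_values[1] is an EXPR_LIST whose entries are the MEM and then the
   CONST_INT, each recorded with mode SImode.  */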
8383
8384static rtx *reg_values;
8385
ea85204a 8386/* This is a preallocated REG rtx which we use as a temporary in
8387 reload_cse_invalidate_regno, so that we don't need to allocate a
8388 new one each time through a loop in that function. */
8389
8390static rtx invalidate_regno_rtx;
8391
5e98e63d 8392/* Invalidate any entries in reg_values which depend on REGNO,
8393 including those for REGNO itself. This is called if REGNO is
8394 changing. If CLOBBER is true, then always forget anything we
8395 currently know about REGNO. MODE is the mode of the assignment to
8396 REGNO, which is used to determine how many hard registers are being
8397 changed. If MODE is VOIDmode, then only REGNO is being changed;
8398 this is used when invalidating call clobbered registers across a
8399 call. */
8400
8401static void
8402reload_cse_invalidate_regno (regno, mode, clobber)
8403 int regno;
8404 enum machine_mode mode;
8405 int clobber;
8406{
8407 int endregno;
8408 register int i;
8409
8410 /* Our callers don't always go through true_regnum; we may see a
8411 pseudo-register here from a CLOBBER or the like. We probably
8412 won't ever see a pseudo-register that has a real register number,
 8413 but we check anyhow for safety. */
8414 if (regno >= FIRST_PSEUDO_REGISTER)
8415 regno = reg_renumber[regno];
8416 if (regno < 0)
8417 return;
8418
8419 if (mode == VOIDmode)
8420 endregno = regno + 1;
8421 else
8422 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8423
8424 if (clobber)
8425 for (i = regno; i < endregno; i++)
8426 reg_values[i] = 0;
8427
8428 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8429 {
8430 rtx x;
8431
8432 for (x = reg_values[i]; x; x = XEXP (x, 1))
8433 {
8434 if (XEXP (x, 0) != 0
9722a2f1 8435 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
5e98e63d 8436 {
8437 /* If this is the only entry on the list, clear
8438 reg_values[i]. Otherwise, just clear this entry on
8439 the list. */
8440 if (XEXP (x, 1) == 0 && x == reg_values[i])
8441 {
8442 reg_values[i] = 0;
8443 break;
8444 }
8445 XEXP (x, 0) = 0;
8446 }
8447 }
8448 }
ea85204a 8449
8450 /* We must look at earlier registers, in case REGNO is part of a
8451 multi word value but is not the first register. If an earlier
8452 register has a value in a mode which overlaps REGNO, then we must
8453 invalidate that earlier register. Note that we do not need to
8454 check REGNO or later registers (we must not check REGNO itself,
8455 because we would incorrectly conclude that there was a conflict). */
8456
8457 for (i = 0; i < regno; i++)
8458 {
8459 rtx x;
8460
8461 for (x = reg_values[i]; x; x = XEXP (x, 1))
8462 {
8463 if (XEXP (x, 0) != 0)
8464 {
ad4e9cb1 8465 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
ea85204a 8466 REGNO (invalidate_regno_rtx) = i;
8467 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8468 NULL_PTR))
8469 {
8470 reload_cse_invalidate_regno (i, VOIDmode, 1);
8471 break;
8472 }
8473 }
8474 }
8475 }
5e98e63d 8476}
8477
04968e2c 8478/* The memory at address MEM_BASE is being changed.
8479 Return whether this change will invalidate VAL. */
5e98e63d 8480
8481static int
63c52f6e 8482reload_cse_mem_conflict_p (mem_base, val)
5e98e63d 8483 rtx mem_base;
5e98e63d 8484 rtx val;
8485{
8486 enum rtx_code code;
8487 char *fmt;
8488 int i;
8489
8490 code = GET_CODE (val);
8491 switch (code)
8492 {
8493 /* Get rid of a few simple cases quickly. */
8494 case REG:
5e98e63d 8495 case PC:
8496 case CC0:
8497 case SCRATCH:
8498 case CONST:
8499 case CONST_INT:
8500 case CONST_DOUBLE:
8501 case SYMBOL_REF:
8502 case LABEL_REF:
8503 return 0;
8504
8505 case MEM:
04968e2c 8506 if (GET_MODE (mem_base) == BLKmode
8507 || GET_MODE (val) == BLKmode)
8508 return 1;
0dbd1c74 8509 if (anti_dependence (val, mem_base))
8510 return 1;
8511 /* The address may contain nested MEMs. */
8512 break;
5e98e63d 8513
8514 default:
8515 break;
8516 }
8517
8518 fmt = GET_RTX_FORMAT (code);
8519
8520 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8521 {
8522 if (fmt[i] == 'e')
8523 {
63c52f6e 8524 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
5e98e63d 8525 return 1;
8526 }
8527 else if (fmt[i] == 'E')
8528 {
8529 int j;
8530
8531 for (j = 0; j < XVECLEN (val, i); j++)
63c52f6e 8532 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
5e98e63d 8533 return 1;
8534 }
8535 }
8536
8537 return 0;
8538}
8539
8540/* Invalidate any entries in reg_values which are changed because of a
8541 store to MEM_RTX. If this is called because of a non-const call
8542 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8543
8544static void
8545reload_cse_invalidate_mem (mem_rtx)
8546 rtx mem_rtx;
8547{
8548 register int i;
5e98e63d 8549
8550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8551 {
8552 rtx x;
8553
8554 for (x = reg_values[i]; x; x = XEXP (x, 1))
8555 {
8556 if (XEXP (x, 0) != 0
63c52f6e 8557 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
5e98e63d 8558 {
8559 /* If this is the only entry on the list, clear
8560 reg_values[i]. Otherwise, just clear this entry on
8561 the list. */
8562 if (XEXP (x, 1) == 0 && x == reg_values[i])
8563 {
8564 reg_values[i] = 0;
8565 break;
8566 }
8567 XEXP (x, 0) = 0;
8568 }
8569 }
8570 }
8571}
8572
8573/* Invalidate DEST, which is being assigned to or clobbered. The
8574 second parameter exists so that this function can be passed to
8575 note_stores; it is ignored. */
8576
8577static void
8578reload_cse_invalidate_rtx (dest, ignore)
8579 rtx dest;
0e93a6ac 8580 rtx ignore ATTRIBUTE_UNUSED;
5e98e63d 8581{
8582 while (GET_CODE (dest) == STRICT_LOW_PART
8583 || GET_CODE (dest) == SIGN_EXTRACT
8584 || GET_CODE (dest) == ZERO_EXTRACT
8585 || GET_CODE (dest) == SUBREG)
8586 dest = XEXP (dest, 0);
8587
8588 if (GET_CODE (dest) == REG)
8589 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8590 else if (GET_CODE (dest) == MEM)
8591 reload_cse_invalidate_mem (dest);
8592}
8593
8594/* Do a very simple CSE pass over the hard registers.
8595
8596 This function detects no-op moves where we happened to assign two
8597 different pseudo-registers to the same hard register, and then
8598 copied one to the other. Reload will generate a useless
8599 instruction copying a register to itself.
8600
8601 This function also detects cases where we load a value from memory
8602 into two different registers, and (if memory is more expensive than
8603 registers) changes it to simply copy the first register into the
0dbd1c74 8604 second register.
8605
8606 Another optimization is performed that scans the operands of each
8607 instruction to see whether the value is already available in a
8608 hard register. It then replaces the operand with the hard register
8609 if possible, much like an optional reload would. */
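
/* For illustration (an assumed example): given

       (set (reg:SI 3) (mem:SI (reg:SI 6)))
       ...
       (set (reg:SI 4) (mem:SI (reg:SI 6)))

   the second load can become (set (reg:SI 4) (reg:SI 3)) when a register
   copy is cheaper than the memory access, and a move whose source and
   destination turn out to be the same hard register is deleted outright.  */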
5e98e63d 8610
11f22bbf 8611static void
8612reload_cse_regs_1 (first)
5e98e63d 8613 rtx first;
8614{
8615 char *firstobj;
8616 rtx callmem;
8617 register int i;
8618 rtx insn;
8619
63c52f6e 8620 init_alias_analysis ();
8621
5e98e63d 8622 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
77d71bdb 8623 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
5e98e63d 8624
8625 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8626 free them when we are done. */
8627 push_obstacks (&reload_obstack, &reload_obstack);
8628 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8629
8630 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8631 memory for a non-const call instruction. */
941522d6 8632 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
5e98e63d 8633
ea85204a 8634 /* This is used in reload_cse_invalidate_regno to avoid consing a
8635 new REG in a loop in that function. */
941522d6 8636 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
ea85204a 8637
5e98e63d 8638 for (insn = first; insn; insn = NEXT_INSN (insn))
8639 {
8640 rtx body;
8641
8642 if (GET_CODE (insn) == CODE_LABEL)
8643 {
8644 /* Forget all the register values at a code label. We don't
8645 try to do anything clever around jumps. */
8646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8647 reg_values[i] = 0;
8648
8649 continue;
8650 }
8651
8652#ifdef NON_SAVING_SETJMP
8653 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8654 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8655 {
8656 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8657 reg_values[i] = 0;
8658
8659 continue;
8660 }
8661#endif
8662
8663 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8664 continue;
8665
8666 /* If this is a call instruction, forget anything stored in a
8667 call clobbered register, or, if this is not a const call, in
8668 memory. */
8669 if (GET_CODE (insn) == CALL_INSN)
8670 {
8671 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8672 if (call_used_regs[i])
8673 reload_cse_invalidate_regno (i, VOIDmode, 1);
8674
8675 if (! CONST_CALL_P (insn))
8676 reload_cse_invalidate_mem (callmem);
8677 }
8678
8679 body = PATTERN (insn);
8680 if (GET_CODE (body) == SET)
8681 {
0dbd1c74 8682 int count = 0;
0b399586 8683 if (reload_cse_noop_set_p (body, insn))
5e98e63d 8684 {
af3c0fc0 8685 /* If this sets the return value of the function, we must keep
8686 a USE around, in case this is in a different basic block
 8687 than the final USE.  Otherwise, we could lose important
 8688 register liveness information on SMALL_REGISTER_CLASSES
8689 machines, where return registers might be used as spills:
8690 subsequent passes assume that spill registers are dead at
8691 the end of a basic block. */
8692 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8693 {
8694 pop_obstacks ();
8695 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8696 INSN_CODE (insn) = -1;
8697 REG_NOTES (insn) = NULL_RTX;
8698 push_obstacks (&reload_obstack, &reload_obstack);
8699 }
8700 else
8701 {
8702 PUT_CODE (insn, NOTE);
8703 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8704 NOTE_SOURCE_FILE (insn) = 0;
8705 }
5e98e63d 8706
8707 /* We're done with this insn. */
8708 continue;
8709 }
8710
0dbd1c74 8711 /* It's not a no-op, but we can try to simplify it. */
0dbd1c74 8712 count += reload_cse_simplify_set (body, insn);
8713
3f4d644c 8714 if (count > 0)
8715 apply_change_group ();
50011c7b 8716 else if (asm_noperands (PATTERN (insn)) < 0)
3f4d644c 8717 reload_cse_simplify_operands (insn);
0dbd1c74 8718
5e98e63d 8719 reload_cse_record_set (body, body);
8720 }
8721 else if (GET_CODE (body) == PARALLEL)
8722 {
0dbd1c74 8723 int count = 0;
af3c0fc0 8724 rtx value = NULL_RTX;
5e98e63d 8725
8726 /* If every action in a PARALLEL is a noop, we can delete
8727 the entire PARALLEL. */
8728 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
af3c0fc0 8729 {
8730 rtx part = XVECEXP (body, 0, i);
8731 if (GET_CODE (part) == SET)
8732 {
8733 if (! reload_cse_noop_set_p (part, insn))
8734 break;
8735 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8736 {
8737 if (value)
8738 break;
8739 value = SET_DEST (part);
8740 }
8741 }
8742 else if (GET_CODE (part) != CLOBBER)
8743 break;
8744 }
5e98e63d 8745 if (i < 0)
8746 {
af3c0fc0 8747 if (value)
8748 {
8749 pop_obstacks ();
8750 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8751 INSN_CODE (insn) = -1;
8752 REG_NOTES (insn) = NULL_RTX;
8753 push_obstacks (&reload_obstack, &reload_obstack);
8754 }
8755 else
8756 {
8757 PUT_CODE (insn, NOTE);
8758 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8759 NOTE_SOURCE_FILE (insn) = 0;
8760 }
5e98e63d 8761
8762 /* We're done with this insn. */
8763 continue;
8764 }
0dbd1c74 8765
8766 /* It's not a no-op, but we can try to simplify it. */
0dbd1c74 8767 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8768 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8769 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8770
3f4d644c 8771 if (count > 0)
8772 apply_change_group ();
50011c7b 8773 else if (asm_noperands (PATTERN (insn)) < 0)
3f4d644c 8774 reload_cse_simplify_operands (insn);
5e98e63d 8775
8776 /* Look through the PARALLEL and record the values being
8777 set, if possible. Also handle any CLOBBERs. */
8778 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8779 {
8780 rtx x = XVECEXP (body, 0, i);
8781
8782 if (GET_CODE (x) == SET)
8783 reload_cse_record_set (x, body);
8784 else
8785 note_stores (x, reload_cse_invalidate_rtx);
8786 }
8787 }
8788 else
8789 note_stores (body, reload_cse_invalidate_rtx);
8790
8791#ifdef AUTO_INC_DEC
8792 /* Clobber any registers which appear in REG_INC notes. We
8793 could keep track of the changes to their values, but it is
8794 unlikely to help. */
8795 {
8796 rtx x;
8797
8798 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8799 if (REG_NOTE_KIND (x) == REG_INC)
8800 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8801 }
8802#endif
8803
8804 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8805 after we have processed the insn. */
8806 if (GET_CODE (insn) == CALL_INSN)
8807 {
8808 rtx x;
8809
8810 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8811 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8812 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8813 }
8814 }
8815
8816 /* Free all the temporary structures we created, and go back to the
8817 regular obstacks. */
8818 obstack_free (&reload_obstack, firstobj);
8819 pop_obstacks ();
8820}
8821
11f22bbf 8822/* Call the cse-like and combine-like post-reload optimization phases.
 8823 FIRST is the first instruction. */
8824void
8825reload_cse_regs (first)
8826 rtx first;
8827{
8828 reload_cse_regs_1 (first);
8829 reload_combine ();
8830 reload_cse_move2add (first);
8831 if (flag_expensive_optimizations)
8832 reload_cse_regs_1 (first);
8833}
8834
5e98e63d 8835/* Return whether the values known for REGNO are equal to VAL. MODE
8836 is the mode of the object that VAL is being copied to; this matters
8837 if VAL is a CONST_INT. */
8838
8839static int
8840reload_cse_regno_equal_p (regno, val, mode)
8841 int regno;
8842 rtx val;
8843 enum machine_mode mode;
8844{
8845 rtx x;
8846
8847 if (val == 0)
8848 return 0;
8849
8850 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8851 if (XEXP (x, 0) != 0
8852 && rtx_equal_p (XEXP (x, 0), val)
0c7bc7b9 8853 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8854 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
5e98e63d 8855 && (GET_CODE (val) != CONST_INT
8856 || mode == GET_MODE (x)
8857 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
f0bbc9e9 8858 /* On a big-endian machine, if the value spans more than
 8859 one register, then this register holds the high part of
8860 it and we can't use it.
8861
8862 ??? We should also compare with the high part of the
8863 value. */
8864 && !(WORDS_BIG_ENDIAN
8865 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
5e98e63d 8866 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8867 GET_MODE_BITSIZE (GET_MODE (x))))))
8868 return 1;
8869
8870 return 0;
8871}
8872
0b399586 8873/* See whether a single set is a noop.  SET is the set instruction we
 8874 should check, and INSN is the instruction from which it came. */
5e98e63d 8875
8876static int
0b399586 8877reload_cse_noop_set_p (set, insn)
5e98e63d 8878 rtx set;
0b399586 8879 rtx insn;
5e98e63d 8880{
8881 rtx src, dest;
8882 enum machine_mode dest_mode;
8883 int dreg, sreg;
0b399586 8884 int ret;
5e98e63d 8885
8886 src = SET_SRC (set);
8887 dest = SET_DEST (set);
8888 dest_mode = GET_MODE (dest);
8889
8890 if (side_effects_p (src))
8891 return 0;
8892
8893 dreg = true_regnum (dest);
8894 sreg = true_regnum (src);
8895
0b399586 8896 /* Check for setting a register to itself. In this case, we don't
8897 have to worry about REG_DEAD notes. */
8898 if (dreg >= 0 && dreg == sreg)
8899 return 1;
8900
8901 ret = 0;
5e98e63d 8902 if (dreg >= 0)
8903 {
8904 /* Check for setting a register to itself. */
8905 if (dreg == sreg)
0b399586 8906 ret = 1;
5e98e63d 8907
8908 /* Check for setting a register to a value which we already know
8909 is in the register. */
0b399586 8910 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8911 ret = 1;
5e98e63d 8912
8913 /* Check for setting a register DREG to another register SREG
8914 where SREG is equal to a value which is already in DREG. */
0b399586 8915 else if (sreg >= 0)
5e98e63d 8916 {
8917 rtx x;
8918
8919 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
0b399586 8920 {
c5798922 8921 rtx tmp;
8922
8923 if (XEXP (x, 0) == 0)
8924 continue;
8925
8926 if (dest_mode == GET_MODE (x))
8927 tmp = XEXP (x, 0);
8928 else if (GET_MODE_BITSIZE (dest_mode)
8929 < GET_MODE_BITSIZE (GET_MODE (x)))
8930 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8931 else
8932 continue;
8933
8934 if (tmp
8935 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
0b399586 8936 {
8937 ret = 1;
8938 break;
8939 }
8940 }
5e98e63d 8941 }
8942 }
8943 else if (GET_CODE (dest) == MEM)
8944 {
8945 /* Check for storing a register to memory when we know that the
8946 register is equivalent to the memory location. */
8947 if (sreg >= 0
8948 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8949 && ! side_effects_p (dest))
0b399586 8950 ret = 1;
5e98e63d 8951 }
8952
0b399586 8953 return ret;
5e98e63d 8954}
8955
8956/* Try to simplify a single SET instruction. SET is the set pattern.
0dbd1c74 8957 INSN is the instruction it came from.
8958 This function only handles one case: if we set a register to a value
8959 which is not a register, we try to find that value in some other register
8960 and change the set into a register copy. */
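
/* For instance (an assumed example): if reg_values records that hard
   register 3 already holds the value of SRC (say a MEM or an expensive
   constant), then (set (reg:SI 4) SRC) can be rewritten as
   (set (reg:SI 4) (reg:SI 3)), provided the classes involved have the
   default register-to-register move cost.  */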
5e98e63d 8961
0dbd1c74 8962static int
5e98e63d 8963reload_cse_simplify_set (set, insn)
8964 rtx set;
8965 rtx insn;
8966{
8967 int dreg;
8968 rtx src;
8969 enum machine_mode dest_mode;
8970 enum reg_class dclass;
8971 register int i;
8972
5e98e63d 8973 dreg = true_regnum (SET_DEST (set));
8974 if (dreg < 0)
0dbd1c74 8975 return 0;
5e98e63d 8976
8977 src = SET_SRC (set);
8978 if (side_effects_p (src) || true_regnum (src) >= 0)
0dbd1c74 8979 return 0;
5e98e63d 8980
3afef759 8981 dclass = REGNO_REG_CLASS (dreg);
8982
8f1dda78 8983 /* If memory loads are cheaper than register copies, don't change them. */
3afef759 8984 if (GET_CODE (src) == MEM
8985 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
0dbd1c74 8986 return 0;
5e98e63d 8987
1162c62d 8988 /* If the constant is cheaper than a register, don't change it. */
8989 if (CONSTANT_P (src)
8990 && rtx_cost (src, SET) < 2)
8991 return 0;
8992
5e98e63d 8993 dest_mode = GET_MODE (SET_DEST (set));
5e98e63d 8994 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8995 {
8996 if (i != dreg
8997 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8998 && reload_cse_regno_equal_p (i, src, dest_mode))
8999 {
9000 int validated;
9001
9002 /* Pop back to the real obstacks while changing the insn. */
9003 pop_obstacks ();
9004
9005 validated = validate_change (insn, &SET_SRC (set),
941522d6 9006 gen_rtx_REG (dest_mode, i), 1);
5e98e63d 9007
9008 /* Go back to the obstack we are using for temporary
9009 storage. */
9010 push_obstacks (&reload_obstack, &reload_obstack);
9011
3f4d644c 9012 if (validated)
9013 return 1;
0dbd1c74 9014 }
9015 }
9016 return 0;
9017}
9018
9019/* Try to replace operands in INSN with equivalent values that are already
9020 in registers. This can be viewed as optional reloading.
9021
9022 For each non-register operand in the insn, see if any hard regs are
9023 known to be equivalent to that operand. Record the alternatives which
9024 can accept these hard registers. Among all alternatives, select the
9025 ones which are better or equal to the one currently matching, where
9026 "better" is in terms of '?' and '!' constraints. Among the remaining
9027 alternatives, select the one which replaces most operands with
9028 hard registers. */
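
/* For illustration (an assumed example): if recog_operand[1] is a MEM whose
   value is known to live in hard register 3, and some alternative that is
   no worse than the currently matching one accepts a register for that
   operand, the MEM is replaced by (reg 3), much as an optional reload
   would have done.  */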
9029
9030static int
9031reload_cse_simplify_operands (insn)
9032 rtx insn;
9033{
9034#ifdef REGISTER_CONSTRAINTS
0dbd1c74 9035 int i,j;
9036
a8482e91 9037 const char *constraints[MAX_RECOG_OPERANDS];
0dbd1c74 9038
9039 /* Vector recording how bad an alternative is. */
9040 int *alternative_reject;
9041 /* Vector recording how many registers can be introduced by choosing
9042 this alternative. */
9043 int *alternative_nregs;
9044 /* Array of vectors recording, for each operand and each alternative,
9045 which hard register to substitute, or -1 if the operand should be
9046 left as it is. */
9047 int *op_alt_regno[MAX_RECOG_OPERANDS];
9048 /* Array of alternatives, sorted in order of decreasing desirability. */
9049 int *alternative_order;
1162c62d 9050 rtx reg = gen_rtx_REG (VOIDmode, -1);
0dbd1c74 9051
7f82be90 9052 extract_insn (insn);
0dbd1c74 9053
7f82be90 9054 if (recog_n_alternatives == 0 || recog_n_operands == 0)
99c14947 9055 return 0;
0dbd1c74 9056
9057 /* Figure out which alternative currently matches. */
7f82be90 9058 if (! constrain_operands (1))
f52d59ea 9059 fatal_insn_not_found (insn);
0dbd1c74 9060
7f82be90 9061 alternative_reject = (int *) alloca (recog_n_alternatives * sizeof (int));
9062 alternative_nregs = (int *) alloca (recog_n_alternatives * sizeof (int));
9063 alternative_order = (int *) alloca (recog_n_alternatives * sizeof (int));
9064 bzero ((char *)alternative_reject, recog_n_alternatives * sizeof (int));
9065 bzero ((char *)alternative_nregs, recog_n_alternatives * sizeof (int));
0dbd1c74 9066
7f82be90 9067 for (i = 0; i < recog_n_operands; i++)
0dbd1c74 9068 {
9069 enum machine_mode mode;
9070 int regno;
a8482e91 9071 const char *p;
0dbd1c74 9072
7f82be90 9073 op_alt_regno[i] = (int *) alloca (recog_n_alternatives * sizeof (int));
9074 for (j = 0; j < recog_n_alternatives; j++)
0dbd1c74 9075 op_alt_regno[i][j] = -1;
9076
7f82be90 9077 p = constraints[i] = recog_constraints[i];
9078 mode = recog_operand_mode[i];
0dbd1c74 9079
9080 /* Add the reject values for each alternative given by the constraints
9081 for this operand. */
9082 j = 0;
9083 while (*p != '\0')
9084 {
9085 char c = *p++;
9086 if (c == ',')
9087 j++;
9088 else if (c == '?')
9089 alternative_reject[j] += 3;
9090 else if (c == '!')
9091 alternative_reject[j] += 300;
9092 }
9093
9094 /* We won't change operands which are already registers. We
9095 also don't want to modify output operands. */
9096 regno = true_regnum (recog_operand[i]);
9097 if (regno >= 0
9098 || constraints[i][0] == '='
9099 || constraints[i][0] == '+')
9100 continue;
9101
9102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9103 {
9104 int class = (int) NO_REGS;
9105
9106 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
9107 continue;
9108
1162c62d 9109 REGNO (reg) = regno;
9110 PUT_MODE (reg, mode);
9111
0dbd1c74 9112 /* We found a register equal to this operand. Now look for all
9113 alternatives that can accept this register and have not been
9114 assigned a register they can use yet. */
9115 j = 0;
9116 p = constraints[i];
9117 for (;;)
0b399586 9118 {
0dbd1c74 9119 char c = *p++;
9120
9121 switch (c)
0b399586 9122 {
0dbd1c74 9123 case '=': case '+': case '?':
9124 case '#': case '&': case '!':
9125 case '*': case '%':
9126 case '0': case '1': case '2': case '3': case '4':
9127 case 'm': case '<': case '>': case 'V': case 'o':
9128 case 'E': case 'F': case 'G': case 'H':
9129 case 's': case 'i': case 'n':
9130 case 'I': case 'J': case 'K': case 'L':
9131 case 'M': case 'N': case 'O': case 'P':
9132#ifdef EXTRA_CONSTRAINT
9133 case 'Q': case 'R': case 'S': case 'T': case 'U':
9134#endif
9135 case 'p': case 'X':
9136 /* These don't say anything we care about. */
9137 break;
9138
9139 case 'g': case 'r':
9140 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9141 break;
9142
9143 default:
9144 class
274c11d8 9145 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
0dbd1c74 9146 break;
0b399586 9147
0dbd1c74 9148 case ',': case '\0':
9149 /* See if REGNO fits this alternative, and set it up as the
9150 replacement register if we don't have one for this
1162c62d 9151 alternative yet and the operand being replaced is not
9152 a cheap CONST_INT. */
0dbd1c74 9153 if (op_alt_regno[i][j] == -1
1162c62d 9154 && reg_fits_class_p (reg, class, 0, mode)
9155 && (GET_CODE (recog_operand[i]) != CONST_INT
9156 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
0b399586 9157 {
0dbd1c74 9158 alternative_nregs[j]++;
9159 op_alt_regno[i][j] = regno;
0b399586 9160 }
0dbd1c74 9161 j++;
9162 break;
0b399586 9163 }
9164
0dbd1c74 9165 if (c == '\0')
9166 break;
9167 }
9168 }
9169 }
9170
9171 /* Record all alternatives which are better or equal to the currently
9172 matching one in the alternative_order array. */
7f82be90 9173 for (i = j = 0; i < recog_n_alternatives; i++)
0dbd1c74 9174 if (alternative_reject[i] <= alternative_reject[which_alternative])
9175 alternative_order[j++] = i;
7f82be90 9176 recog_n_alternatives = j;
0dbd1c74 9177
9178 /* Sort it. Given a small number of alternatives, a dumb algorithm
9179 won't hurt too much. */
7f82be90 9180 for (i = 0; i < recog_n_alternatives - 1; i++)
0dbd1c74 9181 {
9182 int best = i;
9183 int best_reject = alternative_reject[alternative_order[i]];
9184 int best_nregs = alternative_nregs[alternative_order[i]];
9185 int tmp;
9186
7f82be90 9187 for (j = i + 1; j < recog_n_alternatives; j++)
0dbd1c74 9188 {
9189 int this_reject = alternative_reject[alternative_order[j]];
9190 int this_nregs = alternative_nregs[alternative_order[j]];
9191
9192 if (this_reject < best_reject
9193 || (this_reject == best_reject && this_nregs < best_nregs))
9194 {
9195 best = j;
9196 best_reject = this_reject;
9197 best_nregs = this_nregs;
0b399586 9198 }
5e98e63d 9199 }
0dbd1c74 9200
9201 tmp = alternative_order[best];
9202 alternative_order[best] = alternative_order[i];
9203 alternative_order[i] = tmp;
9204 }
9205
9206 /* Substitute the operands as determined by op_alt_regno for the best
9207 alternative. */
9208 j = alternative_order[0];
0dbd1c74 9209
9210 /* Pop back to the real obstacks while changing the insn. */
9211 pop_obstacks ();
9212
7f82be90 9213 for (i = 0; i < recog_n_operands; i++)
0dbd1c74 9214 {
7f82be90 9215 enum machine_mode mode = recog_operand_mode[i];
0dbd1c74 9216 if (op_alt_regno[i][j] == -1)
9217 continue;
9218
0dbd1c74 9219 validate_change (insn, recog_operand_loc[i],
941522d6 9220 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
0dbd1c74 9221 }
9222
7f82be90 9223 for (i = recog_n_dups - 1; i >= 0; i--)
0dbd1c74 9224 {
9225 int op = recog_dup_num[i];
7f82be90 9226 enum machine_mode mode = recog_operand_mode[op];
0dbd1c74 9227
9228 if (op_alt_regno[op][j] == -1)
9229 continue;
9230
0dbd1c74 9231 validate_change (insn, recog_dup_loc[i],
941522d6 9232 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
5e98e63d 9233 }
0dbd1c74 9234
9235 /* Go back to the obstack we are using for temporary
9236 storage. */
9237 push_obstacks (&reload_obstack, &reload_obstack);
9238
9239 return apply_change_group ();
9240#else
9241 return 0;
9242#endif
5e98e63d 9243}
9244
9245/* These two variables are used to pass information from
9246 reload_cse_record_set to reload_cse_check_clobber. */
9247
9248static int reload_cse_check_clobbered;
9249static rtx reload_cse_check_src;
9250
9251/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9252 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9253 second argument, which is passed by note_stores, is ignored. */
9254
9255static void
9256reload_cse_check_clobber (dest, ignore)
9257 rtx dest;
0e93a6ac 9258 rtx ignore ATTRIBUTE_UNUSED;
5e98e63d 9259{
9260 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9261 reload_cse_check_clobbered = 1;
9262}
9263
9264/* Record the result of a SET instruction. SET is the set pattern.
9265 BODY is the pattern of the insn that it came from. */
9266
9267static void
9268reload_cse_record_set (set, body)
9269 rtx set;
9270 rtx body;
9271{
9722a2f1 9272 rtx dest, src, x;
5e98e63d 9273 int dreg, sreg;
9274 enum machine_mode dest_mode;
9275
9276 dest = SET_DEST (set);
9277 src = SET_SRC (set);
9278 dreg = true_regnum (dest);
9279 sreg = true_regnum (src);
9280 dest_mode = GET_MODE (dest);
9281
9722a2f1 9282 /* Some machines don't define AUTO_INC_DEC, but they still use push
9283 instructions. We need to catch that case here in order to
9284 invalidate the stack pointer correctly. Note that invalidating
9285 the stack pointer is different from invalidating DEST. */
9286 x = dest;
9287 while (GET_CODE (x) == SUBREG
9288 || GET_CODE (x) == ZERO_EXTRACT
9289 || GET_CODE (x) == SIGN_EXTRACT
9290 || GET_CODE (x) == STRICT_LOW_PART)
9291 x = XEXP (x, 0);
9292 if (push_operand (x, GET_MODE (x)))
9293 {
9294 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
9295 reload_cse_invalidate_rtx (dest, NULL_RTX);
9296 return;
9297 }
9298
5e98e63d 9299 /* We can only handle an assignment to a register, or a store of a
9300 register to a memory location. For other cases, we just clobber
9301 the destination. We also have to just clobber if there are side
9302 effects in SRC or DEST. */
9303 if ((dreg < 0 && GET_CODE (dest) != MEM)
9304 || side_effects_p (src)
9305 || side_effects_p (dest))
9306 {
9307 reload_cse_invalidate_rtx (dest, NULL_RTX);
9308 return;
9309 }
9310
9311#ifdef HAVE_cc0
9312 /* We don't try to handle values involving CC, because it's a pain
9313 to keep track of when they have to be invalidated. */
9314 if (reg_mentioned_p (cc0_rtx, src)
9315 || reg_mentioned_p (cc0_rtx, dest))
9316 {
9317 reload_cse_invalidate_rtx (dest, NULL_RTX);
9318 return;
9319 }
9320#endif
9321
9322 /* If BODY is a PARALLEL, then we need to see whether the source of
9323 SET is clobbered by some other instruction in the PARALLEL. */
9324 if (GET_CODE (body) == PARALLEL)
9325 {
9326 int i;
9327
9328 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9329 {
9330 rtx x;
9331
9332 x = XVECEXP (body, 0, i);
9333 if (x == set)
9334 continue;
9335
9336 reload_cse_check_clobbered = 0;
9337 reload_cse_check_src = src;
9338 note_stores (x, reload_cse_check_clobber);
9339 if (reload_cse_check_clobbered)
9340 {
9341 reload_cse_invalidate_rtx (dest, NULL_RTX);
9342 return;
9343 }
9344 }
9345 }
9346
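  /* At this point DEST is either a register (DREG >= 0) or a MEM; anything
     else bailed out above, and SRC is not clobbered elsewhere in a PARALLEL.  */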
9347 if (dreg >= 0)
9348 {
9349 int i;
9350
9351 /* This is an assignment to a register. Update the value we
9352 have stored for the register. */
9353 if (sreg >= 0)
e99efbad 9354 {
9355 rtx x;
9356
9357 /* This is a copy from one register to another. Any values
9358 which were valid for SREG are now valid for DREG. If the
9359 mode changes, we use gen_lowpart_common to extract only
9360 the part of the value that is copied. */
9361 reg_values[dreg] = 0;
9362 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9363 {
9364 rtx tmp;
9365
9366 if (XEXP (x, 0) == 0)
9367 continue;
9368 if (dest_mode == GET_MODE (XEXP (x, 0)))
9369 tmp = XEXP (x, 0);
014c646a 9370 else if (GET_MODE_BITSIZE (dest_mode)
9371 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
9372 continue;
e99efbad 9373 else
9374 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
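	      /* gen_lowpart_common returns zero if it cannot extract the
		 low part, in which case this value is simply not carried
		 over to DREG.  */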
9375 if (tmp)
941522d6 9376 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9377 reg_values[dreg]);
e99efbad 9378 }
9379 }
5e98e63d 9380 else
941522d6 9381 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
5e98e63d 9382
9383 /* We've changed DREG, so invalidate any values held by other
9384 registers that depend upon it. */
9385 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9386
9387 /* If this assignment changes more than one hard register,
9388 forget anything we know about the others. */
9389 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9390 reg_values[dreg + i] = 0;
9391 }
9392 else if (GET_CODE (dest) == MEM)
9393 {
9394 /* Invalidate conflicting memory locations. */
9395 reload_cse_invalidate_mem (dest);
9396
9397 /* If we're storing a register to memory, add DEST to the list
9398 in REG_VALUES. */
9399 if (sreg >= 0 && ! side_effects_p (dest))
941522d6 9400 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
5e98e63d 9401 reg_values[sreg]);
9402 }
9403 else
9404 {
9405 /* We should have bailed out earlier. */
9406 abort ();
9407 }
9408}
11f22bbf 9409\f
9410/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9411 addressing now.
 9412   This code might also be useful when reload gave up on reg+reg addressing
9413 because of clashes between the return register and INDEX_REG_CLASS. */
9414
9415/* The maximum number of uses of a register we can keep track of to
9416 replace them with reg+reg addressing. */
9417#define RELOAD_COMBINE_MAX_USES 6
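/* If more than RELOAD_COMBINE_MAX_USES uses of a register are seen,
   reload_combine_note_use below drives its use_index negative and the
   register is from then on treated as used in an unknown fashion.  */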
9418
 9419 /* INSN is the insn where a register has been used, and USEP points to the
9420 location of the register within the rtl. */
9421struct reg_use { rtx insn, *usep; };
9422
9423/* If the register is used in some unknown fashion, USE_INDEX is negative.
9424 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9425 indicates where it becomes live again.
9426 Otherwise, USE_INDEX is the index of the last encountered use of the
9427 register (which is first among these we have seen since we scan backwards),
9428 OFFSET contains the constant offset that is added to the register in
9429 all encountered uses, and USE_RUID indicates the first encountered, i.e.
bb375e94 9430 last, of these uses.
9431 STORE_RUID is always meaningful if we only want to use a value in a
9432 register in a different place: it denotes the next insn in the insn
 9433   stream (i.e. the last encountered) that sets or clobbers the register. */
11f22bbf 9434static struct
9435 {
9436 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9437 int use_index;
9438 rtx offset;
9439 int store_ruid;
9440 int use_ruid;
9441 } reg_state[FIRST_PSEUDO_REGISTER];
9442
9443/* Reverse linear uid. This is increased in reload_combine while scanning
9444 the instructions from last to first. It is used to set last_label_ruid
9445 and the store_ruid / use_ruid fields in reg_state. */
9446static int reload_combine_ruid;
9447
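/* Map a CODE_LABEL to the set of hard registers live at that label.
   This relies on the local variables label_live and min_labelno that
   reload_combine sets up below.  */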
d8d5b231 9448#define LABEL_LIVE(LABEL) \
9449 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
9450
11f22bbf 9451static void
9452reload_combine ()
9453{
9454 rtx insn, set;
9455 int first_index_reg = 1, last_index_reg = 0;
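  /* The initial values (1 > 0) are a sentinel: they survive the scan
     below only if no index register exists at all.  */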
9456 int i;
9457 int last_label_ruid;
d8d5b231 9458 int min_labelno, n_labels;
9459 HARD_REG_SET ever_live_at_start, *label_live;
11f22bbf 9460
 9461   /* If reg+reg can be used in offsettable memory addresses, the main chunk of
9462 reload has already used it where appropriate, so there is no use in
9463 trying to generate it now. */
dab171c5 9464 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
11f22bbf 9465 return;
9466
9467 /* To avoid wasting too much time later searching for an index register,
9468 determine the minimum and maximum index register numbers. */
9469 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9470 {
9471 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9472 {
9473 if (! last_index_reg)
9474 last_index_reg = i;
9475 first_index_reg = i;
9476 }
9477 }
9478 /* If no index register is available, we can quit now. */
9479 if (first_index_reg > last_index_reg)
9480 return;
9481
d8d5b231 9482 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
9483 information is a bit fuzzy immediately after reload, but it's
9484 still good enough to determine which registers are live at a jump
9485 destination. */
9486 min_labelno = get_first_label_num ();
9487 n_labels = max_label_num () - min_labelno;
9488 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
9489 CLEAR_HARD_REG_SET (ever_live_at_start);
9490 for (i = n_basic_blocks - 1; i >= 0; i--)
9491 {
68676d00 9492 insn = BLOCK_HEAD (i);
d8d5b231 9493 if (GET_CODE (insn) == CODE_LABEL)
9494 {
9495 HARD_REG_SET live;
9496
9497 REG_SET_TO_HARD_REG_SET (live, basic_block_live_at_start[i]);
9498 compute_use_by_pseudos (&live, basic_block_live_at_start[i]);
9499 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
9500 IOR_HARD_REG_SET (ever_live_at_start, live);
9501 }
9502 }
9503
11f22bbf 9504 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9505 last_label_ruid = reload_combine_ruid = 0;
9506 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9507 {
bb375e94 9508 reg_state[i].store_ruid = reload_combine_ruid;
11f22bbf 9509 if (fixed_regs[i])
9510 reg_state[i].use_index = -1;
9511 else
bb375e94 9512 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
11f22bbf 9513 }
9514
9515 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9516 {
9517 rtx note;
9518
9519 /* We cannot do our optimization across labels. Invalidating all the use
9520 information we have would be costly, so we just note where the label
9521 is and then later disable any optimization that would cross it. */
9522 if (GET_CODE (insn) == CODE_LABEL)
9523 last_label_ruid = reload_combine_ruid;
d8d5b231 9524 if (GET_CODE (insn) == BARRIER)
9525 {
9526 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9527 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9528 }
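      /* Only real insns (rtx class 'i') advance the ruid and are examined
	 further; notes and the labels / barriers handled above are skipped.  */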
11f22bbf 9529 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9530 continue;
9531 reload_combine_ruid++;
9532
9533 /* Look for (set (REGX) (CONST_INT))
9534 (set (REGX) (PLUS (REGX) (REGY)))
9535 ...
9536 ... (MEM (REGX)) ...
9537 and convert it to
9538 (set (REGZ) (CONST_INT))
9539 ...
9540 ... (MEM (PLUS (REGZ) (REGY)))... .
9541
9542 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9543 and that we know all uses of REGX before it dies. */
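      /* The use_ruid test below guarantees that no CODE_LABEL lies between
	 this insn and any of the recorded uses of REGX, so every one of
	 those uses may safely be rewritten.  */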
0db645f6 9544 set = single_set (insn);
9545 if (set != NULL_RTX
11f22bbf 9546 && GET_CODE (SET_DEST (set)) == REG
9547 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9548 GET_MODE (SET_DEST (set)))
9549 == 1)
9550 && GET_CODE (SET_SRC (set)) == PLUS
9551 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9552 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9553 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9554 {
9555 rtx reg = SET_DEST (set);
9556 rtx plus = SET_SRC (set);
9557 rtx base = XEXP (plus, 1);
9558 rtx prev = prev_nonnote_insn (insn);
9559 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9560 int regno = REGNO (reg);
9561 rtx const_reg;
9562 rtx reg_sum = NULL_RTX;
9563
9564 /* Now, we need an index register.
9565 We'll set index_reg to this index register, const_reg to the
9566 register that is to be loaded with the constant
9567 (denoted as REGZ in the substitution illustration above),
 9568	     and reg_sum to the register-register sum that we want to
 9569	     replace uses of REG (typically in MEMs) with.
9570 First check REG and BASE for being index registers;
9571 we can use them even if they are not dead. */
9572 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9573 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9574 REGNO (base)))
9575 {
9576 const_reg = reg;
9577 reg_sum = plus;
9578 }
9579 else
9580 {
9581 /* Otherwise, look for a free index register. Since we have
 9582		 checked above that neither REG nor BASE is an index register,
9583 if we find anything at all, it will be different from these
9584 two registers. */
9585 for (i = first_index_reg; i <= last_index_reg; i++)
9586 {
9587 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9588 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9589 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9590 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9591 {
9592 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9593 const_reg = index_reg;
9594 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9595 break;
9596 }
9597 }
9598 }
bb375e94 9599 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
9600 (REGY), i.e. BASE, is not clobbered before the last use we'll
9601 create. */
11f22bbf 9602 if (prev_set
9603 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9604 && rtx_equal_p (SET_DEST (prev_set), reg)
9605 && reg_state[regno].use_index >= 0
bb375e94 9606 && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
11f22bbf 9607 && reg_sum)
9608 {
9609 int i;
9610
 9611	      /* Change the destination register and, if necessary, the
9612 constant value in PREV, the constant loading instruction. */
9613 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9614 if (reg_state[regno].offset != const0_rtx)
9615 validate_change (prev,
9616 &SET_SRC (prev_set),
9617 GEN_INT (INTVAL (SET_SRC (prev_set))
9618 + INTVAL (reg_state[regno].offset)),
9619 1);
9620 /* Now for every use of REG that we have recorded, replace REG
9621 with REG_SUM. */
9622 for (i = reg_state[regno].use_index;
9623 i < RELOAD_COMBINE_MAX_USES; i++)
9624 validate_change (reg_state[regno].reg_use[i].insn,
9625 reg_state[regno].reg_use[i].usep,
9626 reg_sum, 1);
9627
9628 if (apply_change_group ())
9629 {
9630 rtx *np;
9631
9632 /* Delete the reg-reg addition. */
9633 PUT_CODE (insn, NOTE);
9634 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9635 NOTE_SOURCE_FILE (insn) = 0;
9636
9637 if (reg_state[regno].offset != const0_rtx)
9638 {
9639 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9640 are now invalid. */
9641 for (np = &REG_NOTES (prev); *np; )
9642 {
9643 if (REG_NOTE_KIND (*np) == REG_EQUAL
9644 || REG_NOTE_KIND (*np) == REG_EQUIV)
9645 *np = XEXP (*np, 1);
9646 else
9647 np = &XEXP (*np, 1);
9648 }
9649 }
9650 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9651 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9652 continue;
9653 }
9654 }
9655 }
9656 note_stores (PATTERN (insn), reload_combine_note_store);
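      /* A call clobbers all call-used registers; registers mentioned in
	 CALL_INSN_FUNCTION_USAGE are either clobbered as well or used in
	 some unknown fashion.  */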
9657 if (GET_CODE (insn) == CALL_INSN)
9658 {
9659 rtx link;
9660
9661 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9662 {
9663 if (call_used_regs[i])
9664 {
9665 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9666 reg_state[i].store_ruid = reload_combine_ruid;
9667 }
9668 }
9669 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9670 link = XEXP (link, 1))
9671 {
9672 rtx use = XEXP (link, 0);
9673 int regno = REGNO (XEXP (use, 0));
9674 if (GET_CODE (use) == CLOBBER)
9675 {
9676 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9677 reg_state[regno].store_ruid = reload_combine_ruid;
9678 }
9679 else
9680 reg_state[regno].use_index = -1;
9681 }
9682 }
d8d5b231 9683 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
11f22bbf 9684 {
 9685	  /* Non-spill registers might be used at the jump destination in
9686 some unknown fashion, so we have to mark the unknown use. */
d8d5b231 9687 HARD_REG_SET *live;
9688 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
9689 && JUMP_LABEL (insn))
9690 live = &LABEL_LIVE (JUMP_LABEL (insn));
9691 else
9692 live = &ever_live_at_start;
11f22bbf 9693 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9694 {
d8d5b231 9695 if (TEST_HARD_REG_BIT (*live, i))
11f22bbf 9696 reg_state[i].use_index = -1;
9697 }
9698 }
9699 reload_combine_note_use (&PATTERN (insn), insn);
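      /* An auto-increment or auto-decrement (REG_INC note) both sets and
	 uses the register in a way we cannot track, so record the store
	 and mark the register as used in an unknown fashion.  */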
9700 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9701 {
9702 if (REG_NOTE_KIND (note) == REG_INC
9703 && GET_CODE (XEXP (note, 0)) == REG)
bb375e94 9704 {
9705 int regno = REGNO (XEXP (note, 0));
9706
9707 reg_state[regno].store_ruid = reload_combine_ruid;
9708 reg_state[regno].use_index = -1;
9709 }
11f22bbf 9710 }
9711 }
d8d5b231 9712 free (label_live);
11f22bbf 9713}
9714
9715/* Check if DST is a register or a subreg of a register; if it is,
9716 update reg_state[regno].store_ruid and reg_state[regno].use_index
9717 accordingly. Called via note_stores from reload_combine.
9718 The second argument, SET, is ignored. */
9719static void
9720reload_combine_note_store (dst, set)
274c11d8 9721 rtx dst, set ATTRIBUTE_UNUSED;
11f22bbf 9722{
9723 int regno = 0;
9724 int i;
9725 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9726
9727 if (GET_CODE (dst) == SUBREG)
9728 {
9729 regno = SUBREG_WORD (dst);
9730 dst = SUBREG_REG (dst);
9731 }
9732 if (GET_CODE (dst) != REG)
9733 return;
9734 regno += REGNO (dst);
9735 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9736 careful with registers / register parts that are not full words. */
274c11d8 9737 if (size < (unsigned) UNITS_PER_WORD)
bb375e94 9738 {
9739 reg_state[regno].use_index = -1;
9740 reg_state[regno].store_ruid = reload_combine_ruid;
9741 }
11f22bbf 9742 else
9743 {
9744 for (i = size / UNITS_PER_WORD - 1 + regno; i >= regno; i--)
9745 {
9746 reg_state[i].store_ruid = reload_combine_ruid;
9747 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9748 }
9749 }
9750}
9751
9752/* XP points to a piece of rtl that has to be checked for any uses of
9753 registers.
9754 *XP is the pattern of INSN, or a part of it.
9755 Called from reload_combine, and recursively by itself. */
9756static void
9757reload_combine_note_use (xp, insn)
9758 rtx *xp, insn;
9759{
9760 rtx x = *xp;
9761 enum rtx_code code = x->code;
9762 char *fmt;
9763 int i, j;
9764 rtx offset = const0_rtx; /* For the REG case below. */
9765
9766 switch (code)
9767 {
9768 case SET:
9769 if (GET_CODE (SET_DEST (x)) == REG)
9770 {
9771 reload_combine_note_use (&SET_SRC (x), insn);
9772 return;
9773 }
9774 break;
9775
9776 case CLOBBER:
9777 if (GET_CODE (SET_DEST (x)) == REG)
9778 return;
9779 break;
9780
9781 case PLUS:
9782 /* We are interested in (plus (reg) (const_int)) . */
9783 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9784 break;
9785 offset = XEXP (x, 1);
9786 x = XEXP (x, 0);
9787 /* Fall through. */
9788 case REG:
9789 {
9790 int regno = REGNO (x);
9791 int use_index;
9792
9793 /* Some spurious USEs of pseudo registers might remain.
9794 Just ignore them. */
9795 if (regno >= FIRST_PSEUDO_REGISTER)
9796 return;
9797
9798 /* If this register is already used in some unknown fashion, we
9799 can't do anything.
9800 If we decrement the index from zero to -1, we can't store more
9801 uses, so this register becomes used in an unknown fashion. */
9802 use_index = --reg_state[regno].use_index;
9803 if (use_index < 0)
9804 return;
9805
9806 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9807 {
9808 /* We have found another use for a register that is already
9809 used later. Check if the offsets match; if not, mark the
9810 register as used in an unknown fashion. */
9811 if (! rtx_equal_p (offset, reg_state[regno].offset))
9812 {
9813 reg_state[regno].use_index = -1;
9814 return;
9815 }
9816 }
9817 else
9818 {
9819 /* This is the first use of this register we have seen since we
9820 marked it as dead. */
9821 reg_state[regno].offset = offset;
9822 reg_state[regno].use_ruid = reload_combine_ruid;
9823 }
9824 reg_state[regno].reg_use[use_index].insn = insn;
9825 reg_state[regno].reg_use[use_index].usep = xp;
9826 return;
9827 }
9828
9829 default:
9830 break;
9831 }
9832
9833 /* Recursively process the components of X. */
9834 fmt = GET_RTX_FORMAT (code);
9835 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9836 {
9837 if (fmt[i] == 'e')
9838 reload_combine_note_use (&XEXP (x, i), insn);
9839 else if (fmt[i] == 'E')
9840 {
9841 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9842 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9843 }
9844 }
9845}
9846\f
9847/* See if we can reduce the cost of a constant by replacing a move with
9848 an add. */
9849/* We cannot do our optimization across labels. Invalidating all the
9850 information about register contents we have would be costly, so we
9851 use last_label_luid (local variable of reload_cse_move2add) to note
9852 where the label is and then later disable any optimization that would
9853 cross it.
9854 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
 9855   reg_set_luid[n] is larger than last_label_luid.  */
9856static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9857/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9858 reg_mode[n] to be valid.
9859 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9860 has been set to reg_offset[n] in mode reg_mode[n] .
9861 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9862 register n has been set to the sum of reg_offset[n] and register
9863 reg_base_reg[n], calculated in mode reg_mode[n] . */
9864static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9865static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9866static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
 9867 /* move2add_luid is linearly increased while scanning the instructions
9868 from first to last. It is used to set reg_set_luid in
3f4d644c 9869 reload_cse_move2add and move2add_note_store. */
11f22bbf 9870static int move2add_luid;
9871
9872static void
9873reload_cse_move2add (first)
9874 rtx first;
9875{
9876 int i;
9877 rtx insn;
9878 int last_label_luid;
11f22bbf 9879
9880 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
3f4d644c 9881 reg_set_luid[i] = 0;
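  /* Since move2add_luid starts at 1 and last_label_luid at 0, clearing
     reg_set_luid above marks every register's contents as unknown.  */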
9882
11f22bbf 9883 last_label_luid = 0;
9884 move2add_luid = 1;
9885 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9886 {
9887 rtx pat, note;
9888
9889 if (GET_CODE (insn) == CODE_LABEL)
9890 last_label_luid = move2add_luid;
9891 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9892 continue;
9893 pat = PATTERN (insn);
9894 /* For simplicity, we only perform this optimization on
9895 straightforward SETs. */
9896 if (GET_CODE (pat) == SET
9897 && GET_CODE (SET_DEST (pat)) == REG)
9898 {
9899 rtx reg = SET_DEST (pat);
9900 int regno = REGNO (reg);
9901 rtx src = SET_SRC (pat);
9902
9903 /* Check if we have valid information on the contents of this
9904 register in the mode of REG. */
9905 /* ??? We don't know how zero / sign extension is handled, hence
9906 we can't go from a narrower to a wider mode. */
9907 if (reg_set_luid[regno] > last_label_luid
9908 && (GET_MODE_SIZE (GET_MODE (reg))
9909 <= GET_MODE_SIZE (reg_mode[regno]))
9910 && GET_CODE (reg_offset[regno]) == CONST_INT)
9911 {
9912 /* Try to transform (set (REGX) (CONST_INT A))
9913 ...
9914 (set (REGX) (CONST_INT B))
9915 to
9916 (set (REGX) (CONST_INT A))
9917 ...
9918 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9919
9920 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9921 {
9922 int success = 0;
9923 rtx new_src = GEN_INT (INTVAL (src)
9924 - INTVAL (reg_offset[regno]));
9925 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9926 use (set (reg) (reg)) instead.
9927 We don't delete this insn, nor do we convert it into a
9928 note, to avoid losing register notes or the return
 9929		     value flag.  jump2 already knows how to get rid of
9930 no-op moves. */
9931 if (new_src == const0_rtx)
9932 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9933 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9934 && have_add2_insn (GET_MODE (reg)))
9935 success = validate_change (insn, &PATTERN (insn),
9936 gen_add2_insn (reg, new_src), 0);
11f22bbf 9937 reg_set_luid[regno] = move2add_luid;
9938 reg_mode[regno] = GET_MODE (reg);
9939 reg_offset[regno] = src;
9940 continue;
9941 }
9942
9943 /* Try to transform (set (REGX) (REGY))
9944 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9945 ...
9946 (set (REGX) (REGY))
9947 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9948 to
 9949			 (set (REGX) (REGY))
9950 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9951 ...
9952 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9953 else if (GET_CODE (src) == REG
9954 && reg_base_reg[regno] == REGNO (src)
9955 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
9956 {
9957 rtx next = next_nonnote_insn (insn);
 9958		  rtx set = NULL_RTX;
9959 if (next)
9960 set = single_set (next);
9961 if (next
9962 && set
9963 && SET_DEST (set) == reg
9964 && GET_CODE (SET_SRC (set)) == PLUS
9965 && XEXP (SET_SRC (set), 0) == reg
9966 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9967 {
11f22bbf 9968 rtx src3 = XEXP (SET_SRC (set), 1);
9969 rtx new_src = GEN_INT (INTVAL (src3)
9970 - INTVAL (reg_offset[regno]));
9971 int success = 0;
9972
9973 if (new_src == const0_rtx)
9974 /* See above why we create (set (reg) (reg)) here. */
9975 success
9976 = validate_change (next, &SET_SRC (set), reg, 0);
9977 else if ((rtx_cost (new_src, PLUS)
9978 < 2 + rtx_cost (src3, SET))
9979 && have_add2_insn (GET_MODE (reg)))
9980 success
9981 = validate_change (next, &PATTERN (next),
9982 gen_add2_insn (reg, new_src), 0);
9983 if (success)
9984 {
11f22bbf 9985 /* INSN might be the first insn in a basic block
9986 if the preceding insn is a conditional jump
 9987			     or a possibly throwing call.  */
9988 PUT_CODE (insn, NOTE);
9989 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9990 NOTE_SOURCE_FILE (insn) = 0;
9991 }
9992 insn = next;
9993 reg_set_luid[regno] = move2add_luid;
9994 reg_mode[regno] = GET_MODE (reg);
9995 reg_offset[regno] = src3;
9996 continue;
9997 }
9998 }
9999 }
10000 }
10001
10002 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
10003 {
10004 if (REG_NOTE_KIND (note) == REG_INC
10005 && GET_CODE (XEXP (note, 0)) == REG)
10006 {
10007 /* Indicate that this register has been recently written to,
10008 but the exact contents are not available. */
10009 int regno = REGNO (XEXP (note, 0));
10010 if (regno < FIRST_PSEUDO_REGISTER)
10011 {
10012 reg_set_luid[regno] = move2add_luid;
10013 reg_offset[regno] = note;
10014 }
10015 }
11f22bbf 10016 }
10017 note_stores (PATTERN (insn), move2add_note_store);
10018 /* If this is a CALL_INSN, all call used registers are stored with
10019 unknown values. */
10020 if (GET_CODE (insn) == CALL_INSN)
10021 {
10022 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
10023 {
10024 if (call_used_regs[i])
10025 {
10026 reg_set_luid[i] = move2add_luid;
10027 reg_offset[i] = insn; /* Invalidate contents. */
10028 }
10029 }
10030 }
10031 }
10032}
10033
10034/* SET is a SET or CLOBBER that sets DST.
10035 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
10036 Called from reload_cse_move2add via note_stores. */
10037static void
10038move2add_note_store (dst, set)
10039 rtx dst, set;
10040{
10041 int regno = 0;
10042 int i;
10043
10044 enum machine_mode mode = GET_MODE (dst);
10045 if (GET_CODE (dst) == SUBREG)
10046 {
10047 regno = SUBREG_WORD (dst);
10048 dst = SUBREG_REG (dst);
10049 }
10050 if (GET_CODE (dst) != REG)
10051 return;
10052
10053 regno += REGNO (dst);
10054
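  /* Only a SET of a destination occupying a single hard register can be
     tracked exactly; any other store just invalidates the registers it
     touches (the else branch below).  */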
10055 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET)
10056 {
10057 rtx src = SET_SRC (set);
10058
10059 reg_mode[regno] = mode;
10060 switch (GET_CODE (src))
10061 {
10062 case PLUS:
10063 {
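	      /* The source is (plus (reg) X).  Unless the register operand
		 is this very register and its recorded offset was zero,
		 record a new base register and luid.  X becomes the new
		 offset; reload_cse_move2add only trusts it when it is a
		 CONST_INT.  */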
10064 rtx src0 = XEXP (src, 0);
10065 if (GET_CODE (src0) == REG)
10066 {
10067 if (REGNO (src0) != regno
10068 || reg_offset[regno] != const0_rtx)
10069 {
10070 reg_base_reg[regno] = REGNO (src0);
10071 reg_set_luid[regno] = move2add_luid;
10072 }
10073 reg_offset[regno] = XEXP (src, 1);
10074 break;
10075 }
10076 reg_set_luid[regno] = move2add_luid;
10077 reg_offset[regno] = set; /* Invalidate contents. */
10078 break;
10079 }
10080
10081 case REG:
10082 reg_base_reg[regno] = REGNO (SET_SRC (set));
10083 reg_offset[regno] = const0_rtx;
10084 reg_set_luid[regno] = move2add_luid;
10085 break;
10086
10087 default:
10088 reg_base_reg[regno] = -1;
10089 reg_offset[regno] = SET_SRC (set);
10090 reg_set_luid[regno] = move2add_luid;
10091 break;
10092 }
10093 }
10094 else
10095 {
10096 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
10097 {
10098 /* Indicate that this register has been recently written to,
10099 but the exact contents are not available. */
10100 reg_set_luid[i] = move2add_luid;
10101 reg_offset[i] = dst;
10102 }
10103 }
10104}