/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "real.h"
#include "toplev.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
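
/* Illustrative example (not from this file): suppose pseudo 99 did not get
   a hard register, so its rtx has been replaced by its stack slot, and an
   insn

       (set (reg:SI 3) (plus:SI (reg:SI 99) (const_int 1)))

   requires the operand of the PLUS to be in a register.  Reload then emits
   an input reload in front of the insn, roughly

       (set (reg:SI 2) (mem:SI (plus:SI (reg:SI 6) (const_int -8))))
       (set (reg:SI 3) (plus:SI (reg:SI 2) (const_int 1)))

   where hard register 2 is the reload register chosen for this insn, the
   MEM is pseudo 99's stack slot, and register 6 stands for the frame
   pointer.  All register numbers and the offset are made up for the
   example.  */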


#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the list of insns that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!?  This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   pseudo is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */
static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */
char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */
char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset spilled_pseudos;

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */
enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory used while processing one insn.  */
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
struct insn_chain *reload_insn_chain;

#ifdef TREE_CODE
extern tree current_function_decl;
#else
extern union tree_node *current_function_decl;
#endif

/* List of all insns needing reloads.  */
static struct insn_chain *insns_need_reload;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
};

static struct elim_table * reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static struct elim_table_1
{
  int from;
  int to;
} reg_eliminate_1[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
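
/* Illustrative note (not from this file): a target's ELIMINABLE_REGS macro
   typically expands to an initializer such as

       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },
        { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   (the exact pairs are target-specific and hypothetical here); each pair
   becomes one reg_eliminate_1 entry, and NUM_ELIMINABLE_REGS above counts
   those pairs.  */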

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];
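
/* Illustrative note (not from this file): these tables are set up so they
   can be indexed directly by label number, e.g. (hypothetical use, with
   elim_index naming some elimination's index)

       if (offsets_known_at[CODE_LABEL_NUMBER (label)])
         offset = offsets_at[CODE_LABEL_NUMBER (label)][elim_index];

   because the underlying storage pointers are biased by
   get_first_label_num () when reload () allocates them below.  */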

/* Number of labels in the current function.  */

static int num_labels;

struct hard_reg_n_uses
{
  int regno;
  unsigned int uses;
};
\f
static void maybe_fix_stack_asms PROTO((void));
static void calculate_needs_all_insns PROTO((int));
static void calculate_needs PROTO((struct insn_chain *));
static void find_reload_regs PROTO((struct insn_chain *chain,
                                    FILE *));
static void find_tworeg_group PROTO((struct insn_chain *, int,
                                     FILE *));
static void find_group PROTO((struct insn_chain *, int,
                              FILE *));
static int possible_group_p PROTO((struct insn_chain *, int));
static void count_possible_groups PROTO((struct insn_chain *, int));
static int modes_equiv_for_class_p PROTO((enum machine_mode,
                                          enum machine_mode,
                                          enum reg_class));
static void delete_caller_save_insns PROTO((void));

static void spill_failure PROTO((rtx));
static void new_spill_reg PROTO((struct insn_chain *, int, int,
                                 int, FILE *));
static void maybe_mark_pseudo_spilled PROTO((int));
static void delete_dead_insn PROTO((rtx));
static void alter_reg PROTO((int, int));
static void set_label_offsets PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn PROTO((rtx, int));
static void update_eliminable_offsets PROTO((void));
static void mark_not_eliminable PROTO((rtx, rtx));
static void set_initial_elim_offsets PROTO((void));
static void verify_initial_elim_offsets PROTO((void));
static void set_initial_label_offsets PROTO((void));
static void set_offsets_for_label PROTO((rtx));
static void init_elim_table PROTO((void));
static void update_eliminables PROTO((HARD_REG_SET *));
static void spill_hard_reg PROTO((int, FILE *, int));
static int finish_spills PROTO((int, FILE *));
static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
static void scan_paradoxical_subregs PROTO((rtx));
static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
static void order_regs_for_reload PROTO((struct insn_chain *));
static void reload_as_needed PROTO((int));
static void forget_old_reloads_1 PROTO((rtx, rtx));
static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
                                          enum machine_mode));
static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
                                           enum machine_mode));
static int reload_reg_free_p PROTO((int, int, enum reload_type));
static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int, int));
static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
static int allocate_reload_reg PROTO((struct insn_chain *, int, int,
                                      int));
static void choose_reload_regs PROTO((struct insn_chain *));
static void merge_assigned_reloads PROTO((rtx));
static void emit_reload_insns PROTO((struct insn_chain *));
static void delete_output_reload PROTO((rtx, int, int));
static void delete_address_reloads PROTO((rtx, rtx));
static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
static int constraint_accepts_reg_p PROTO((const char *, rtx));
static void reload_cse_regs_1 PROTO((rtx));
static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
static void reload_cse_invalidate_mem PROTO((rtx));
static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
static int reload_cse_noop_set_p PROTO((rtx, rtx));
static int reload_cse_simplify_set PROTO((rtx, rtx));
static int reload_cse_simplify_operands PROTO((rtx));
static void reload_cse_check_clobber PROTO((rtx, rtx));
static void reload_cse_record_set PROTO((rtx, rtx));
static void reload_combine PROTO((void));
static void reload_combine_note_use PROTO((rtx *, rtx));
static void reload_combine_note_store PROTO((rtx, rtx));
static void reload_cse_move2add PROTO((rtx));
static void move2add_note_store PROTO((rtx, rtx));
#ifdef AUTO_INC_DEC
static void add_auto_inc_notes PROTO((rtx, rtx));
#endif
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx_MEM (Pmode,
                   gen_rtx_PLUS (Pmode,
                                 gen_rtx_REG (Pmode,
                                              LAST_VIRTUAL_REGISTER + 1),
                                 GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
                          gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
                          gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* List of insn chains that are currently unused.  */
static struct insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */
struct insn_chain *
new_insn_chain ()
{
  struct insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = (struct insn_chain *)
        obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
      c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
      c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */
void
compute_use_by_pseudos (to, from)
     HARD_REG_SET *to;
     regset from;
{
  int regno;
  EXECUTE_IF_SET_IN_REG_SET
    (from, FIRST_PSEUDO_REGISTER, regno,
     {
       int r = reg_renumber[regno];
       int nregs;
       if (r < 0)
         {
           /* reload_combine uses the information from
              BASIC_BLOCK->global_live_at_start, which might still
              contain registers that have not actually been allocated
              since they have an equivalence.  */
           if (! reload_completed)
             abort ();
         }
       else
         {
           nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
           while (nregs-- > 0)
             SET_HARD_REG_BIT (*to, r + nregs);
         }
     });
}
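
/* Illustrative use (not from this file): callers in reload typically build
   up a hard-reg set of everything live around an insn roughly like

       HARD_REG_SET used_by_pseudos;
       CLEAR_HARD_REG_SET (used_by_pseudos);
       compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
       compute_use_by_pseudos (&used_by_pseudos, chain->live_after);

   The function only sets bits in *TO, so the caller must clear or otherwise
   initialize the set first; `chain' here stands for some struct insn_chain *
   and is hypothetical in this sketch.  */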
\f
/* Global variables used by reload and its subroutines.  */

/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;
/* Set during calculate_needs if an insn needs an operand changed.  */
int something_needs_operands_changed;

/* Nonzero means we couldn't get enough spill regs.  */
static int failure;

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int i;
  register rtx insn;
  register struct elim_table *ep;

  /* The two pointers used to track the true location of the memory used
     for label offsets.  */
  char *real_known_ptr = NULL_PTR;
  int (*real_at_ptr)[NUM_ELIMINABLE_REGS];

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  failure = 0;

  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that receives a nonlocal goto must save all call-saved
     registers.  */
  if (current_function_has_nonlocal_label)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      {
        if (! call_used_regs[i] && ! fixed_regs[i])
          regs_ever_live[i] = 1;
      }

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) xcalloc (max_regno, sizeof (rtx));
  reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
  reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
  reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
  reg_max_ref_width = (int *) xcalloc (max_regno, sizeof (int));
  reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
  bcopy ((PTR) reg_renumber, (PTR) reg_old_renumber, max_regno * sizeof (short));
  pseudo_forbidden_regs
    = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
  pseudo_previous_regs
    = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));

  CLEAR_HARD_REG_SET (bad_spill_regs_global);

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.
     Also look for a "constant" NOTE_INSN_SETJMP.  This means that all
     caller-saved registers must be marked live.  */

  num_eliminable_invariants = 0;
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
          if (! call_used_regs[i])
            regs_ever_live[i] = 1;

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! function_invariant_p (XEXP (note, 0))
                  || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
              rtx x = XEXP (note, 0);
              i = REGNO (SET_DEST (set));
              if (i > LAST_VIRTUAL_REGISTER)
                {
                  if (GET_CODE (x) == MEM)
                    {
                      /* If the operand is a PLUS, the MEM may be shared,
                         so make sure we have an unshared copy here.  */
                      if (GET_CODE (XEXP (x, 0)) == PLUS)
                        x = copy_rtx (x);

                      reg_equiv_memory_loc[i] = x;
                    }
                  else if (function_invariant_p (x))
                    {
                      if (GET_CODE (x) == PLUS)
                        {
                          /* This is PLUS of frame pointer and a constant,
                             and might be shared.  Unshare it.  */
                          reg_equiv_constant[i] = copy_rtx (x);
                          num_eliminable_invariants++;
                        }
                      else if (x == frame_pointer_rtx
                               || x == arg_pointer_rtx)
                        {
                          reg_equiv_constant[i] = x;
                          num_eliminable_invariants++;
                        }
                      else if (LEGITIMATE_CONSTANT_P (x))
                        reg_equiv_constant[i] = x;
                      else
                        reg_equiv_memory_loc[i]
                          = force_const_mem (GET_MODE (SET_DEST (set)), x);
                    }
                  else
                    continue;

                  /* If this register is being made equivalent to a MEM
                     and the MEM is not SET_SRC, the equivalencing insn
                     is one with the MEM as a SET_DEST and it occurs later.
                     So don't mark this insn now.  */
                  if (GET_CODE (x) != MEM
                      || rtx_equal_p (SET_SRC (set), x))
                    reg_equiv_init[i]
                      = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
                }
            }
        }

      /* If this insn is setting a MEM from a register equivalent to it,
         this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
               && GET_CODE (SET_SRC (set)) == REG
               && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
               && rtx_equal_p (SET_DEST (set),
                               reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
        reg_equiv_init[REGNO (SET_SRC (set))]
          = gen_rtx_INSN_LIST (VOIDmode, insn,
                               reg_equiv_init[REGNO (SET_SRC (set))]);

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        scan_paradoxical_subregs (PATTERN (insn));
    }

  init_elim_table ();

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause a core dump.  */
  real_known_ptr = xmalloc (num_labels);
  real_at_ptr
    = (int (*)[NUM_ELIMINABLE_REGS])
      xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at = real_known_ptr - get_first_label_num ();
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    {
      free (real_known_ptr);
      free (real_at_ptr);
      free (reg_equiv_constant);
      free (reg_equiv_memory_loc);
      free (reg_equiv_mem);
      free (reg_equiv_init);
      free (reg_equiv_address);
      free (reg_max_ref_width);
      free (reg_old_renumber);
      free (pseudo_previous_regs);
      free (pseudo_forbidden_regs);
      return 0;
    }
#endif

  maybe_fix_stack_asms ();

  insns_need_reload = 0;
  something_needs_elimination = 0;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  spilled_pseudos = ALLOCA_REG_SET ();

  /* Spill any hard regs that we know we can't eliminate.  */
  CLEAR_HARD_REG_SET (used_spill_regs);
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
#endif
  finish_spills (global, dumpfile);

  /* From now on, we may need to generate moves differently.  We may also
     allow modifications of insns which cause them to not be recognized.
     Any such modifications will be cleaned up during reload itself.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */
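  /* Informal outline of the loop below (added for clarity; the real code
     has more steps than this sketch):

         recompute initial elimination and label offsets;
         substitute equivalences and (re)allocate stack slots;
         calculate_needs_all_insns (global);
         find reload regs for every insn that needs them;
         finish_spills (global, dumpfile);

     and the loop exits once an iteration changes nothing.  */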
  for (;;)
    {
      int something_changed;
      int did_spill;
      struct insn_chain *chain;

      HOST_WIDE_INT starting_frame_size;

      /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done
         here because the stack size may be a part of the offset computation
         for register elimination, and there might have been new stack slots
         created in the last iteration of this loop.  */
      assign_stack_local (BLKmode, 0, 0);

      starting_frame_size = get_frame_size ();

      set_initial_elim_offsets ();
      set_initial_label_offsets ();

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.  Similarly if it is a hard register, but one that is not
         valid as an address register.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot.  We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
                                         XEXP (x, 0)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (GET_CODE (XEXP (x, 0)) == REG
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot.  Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset.  reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop.  */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1);
              }
          }

      if (caller_save_needed)
        setup_save_areas ();

      /* If we allocated another stack slot, redo elimination bookkeeping.  */
      if (starting_frame_size != get_frame_size ())
        continue;

      if (caller_save_needed)
        {
          save_call_clobbered_regs ();
          /* That might have allocated new insn_chain structures.  */
          reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
        }

      calculate_needs_all_insns (global);

      CLEAR_REG_SET (spilled_pseudos);
      did_spill = 0;

      something_changed = 0;

      /* If we allocated any new memory locations, make another pass
         since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
        something_changed = 1;

      {
        HARD_REG_SET to_spill;
        CLEAR_HARD_REG_SET (to_spill);
        update_eliminables (&to_spill);
        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
          if (TEST_HARD_REG_BIT (to_spill, i))
            {
              spill_hard_reg (i, dumpfile, 1);
              did_spill = 1;

              /* Regardless of the state of spills, if we previously had
                 a register that we thought we could eliminate, but now
                 cannot eliminate, we must run another pass.

                 Consider pseudos which have an entry in reg_equiv_* which
                 reference an eliminable register.  We must make another pass
                 to update reg_equiv_* so that we do not substitute in the
                 old value from when we thought the elimination could be
                 performed.  */
              something_changed = 1;
            }
      }

      CLEAR_HARD_REG_SET (used_spill_regs);
      /* Try to satisfy the needs for each insn.  */
      for (chain = insns_need_reload; chain != 0;
           chain = chain->next_need_reload)
        find_reload_regs (chain, dumpfile);

      if (failure)
        goto failed;

      if (insns_need_reload != 0 || did_spill)
        something_changed |= finish_spills (global, dumpfile);

      if (! something_changed)
        break;

      if (caller_save_needed)
        delete_caller_save_insns ();
    }

  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
        mark_elimination (ep->from, ep->to);

  /* If a pseudo has no hard reg, delete the insns that made the equivalence.
     If that insn didn't set the register (i.e., it copied the register to
     memory), just delete that insn instead of the equivalencing insn plus
     anything now dead.  If we call delete_dead_insn on that insn, we may
     delete the insn that actually sets the register if the register dies
     there and that is incorrect.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
        {
          rtx list;
          for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
            {
              rtx equiv_insn = XEXP (list, 0);
              if (GET_CODE (equiv_insn) == NOTE)
                continue;
              if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
                delete_dead_insn (equiv_insn);
              else
                {
                  PUT_CODE (equiv_insn, NOTE);
                  NOTE_SOURCE_FILE (equiv_insn) = 0;
                  NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
                }
            }
        }
    }

  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (insns_need_reload != 0 || something_needs_elimination
      || something_needs_operands_changed)
    {
      int old_frame_size = get_frame_size ();

      reload_as_needed (global);

      if (old_frame_size != get_frame_size ())
        abort ();

      if (num_eliminable)
        verify_initial_elim_offsets ();
    }

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    for (i = 0; i < n_basic_blocks; i++)
      CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
                           HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill regs
     and we decide not to abort about it.  */
 failed:

  reload_in_progress = 0;

  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;
      int in_struct = 0;
      int is_scalar = 0;
      int is_readonly = 0;

      if (reg_equiv_memory_loc[i])
        {
          in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
          is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
          is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
        }

      if (reg_equiv_mem[i])
        addr = XEXP (reg_equiv_mem[i], 0);

      if (reg_equiv_address[i])
        addr = reg_equiv_address[i];

      if (addr)
        {
          if (reg_renumber[i] < 0)
            {
              rtx reg = regno_reg_rtx[i];
              PUT_CODE (reg, MEM);
              XEXP (reg, 0) = addr;
              REG_USERVAR_P (reg) = 0;
              RTX_UNCHANGING_P (reg) = is_readonly;
              MEM_IN_STRUCT_P (reg) = in_struct;
              MEM_SCALAR_P (reg) = is_scalar;
              /* We have no alias information about this newly created
                 MEM.  */
              MEM_ALIAS_SET (reg) = 0;
            }
          else if (reg_equiv_mem[i])
            XEXP (reg_equiv_mem[i], 0) = addr;
        }
    }

  /* We must set reload_completed now since the cleanup_subreg_operands call
     below will re-recognize each insn and reload may have generated insns
     which are only valid during and after reload.  */
  reload_completed = 1;

  /* Make a pass over all the insns and delete all USEs which we
     inserted only to tag a REG_EQUAL note on them.  Remove all
     REG_DEAD and REG_UNUSED notes.  Delete all CLOBBER insns and
     simplify (subreg (reg)) operands.  Also remove all REG_RETVAL and
     REG_LIBCALL notes since they are no longer useful or accurate.
     Strip and regenerate REG_INC notes that may have been moved
     around.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
        rtx *pnote;

        if ((GET_CODE (PATTERN (insn)) == USE
             && find_reg_note (insn, REG_EQUAL, NULL_RTX))
            || GET_CODE (PATTERN (insn)) == CLOBBER)
          {
            PUT_CODE (insn, NOTE);
            NOTE_SOURCE_FILE (insn) = 0;
            NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
            continue;
          }

        pnote = &REG_NOTES (insn);
        while (*pnote != 0)
          {
            if (REG_NOTE_KIND (*pnote) == REG_DEAD
                || REG_NOTE_KIND (*pnote) == REG_UNUSED
                || REG_NOTE_KIND (*pnote) == REG_INC
                || REG_NOTE_KIND (*pnote) == REG_RETVAL
                || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
              *pnote = XEXP (*pnote, 1);
            else
              pnote = &XEXP (*pnote, 1);
          }

#ifdef AUTO_INC_DEC
        add_auto_inc_notes (insn, PATTERN (insn));
#endif

        /* And simplify (subreg (reg)) if it appears as an operand.  */
        cleanup_subreg_operands (insn);
      }

  /* If we are doing stack checking, give a warning if this function's
     frame size is larger than we expect.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    {
      HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
      static int verbose_warned = 0;

      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
          size += UNITS_PER_WORD;

      if (size > STACK_CHECK_MAX_FRAME_SIZE)
        {
          warning ("frame size too large for reliable stack checking");
          if (! verbose_warned)
            {
              warning ("try reducing the number of local variables");
              verbose_warned = 1;
            }
        }
    }

  /* Indicate that we no longer have known memory locations or constants.  */
  if (reg_equiv_constant)
    free (reg_equiv_constant);
  reg_equiv_constant = 0;
  if (reg_equiv_memory_loc)
    free (reg_equiv_memory_loc);
  reg_equiv_memory_loc = 0;

  if (real_known_ptr)
    free (real_known_ptr);
  if (real_at_ptr)
    free (real_at_ptr);

  free (reg_equiv_mem);
  free (reg_equiv_init);
  free (reg_equiv_address);
  free (reg_max_ref_width);
  free (reg_old_renumber);
  free (pseudo_previous_regs);
  free (pseudo_forbidden_regs);

  FREE_REG_SET (spilled_pseudos);

  CLEAR_HARD_REG_SET (used_spill_regs);
  for (i = 0; i < n_spills; i++)
    SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);

  /* Free all the insn_chain structures at once.  */
  obstack_free (&reload_obstack, reload_startobj);
  unused_insn_chains = 0;

  return failure;
}

/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.  */
static void
maybe_fix_stack_asms ()
{
#ifdef STACK_REGS
  const char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
          || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
        continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
        continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx t = XVECEXP (pat, 0, i);
          if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
            SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
        }

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_operand, recog_operand_loc,
                           constraints, operand_mode);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
        {
          const char *p = constraints[i];
          /* For every alternative, we compute the class of registers allowed
             for reloading in CLS, and merge its contents into the reg set
             ALLOWED.  */
          int cls = (int) NO_REGS;

          for (;;)
            {
              char c = *p++;

              if (c == '\0' || c == ',' || c == '#')
                {
                  /* End of one alternative - mark the regs in the current
                     class, and reset the class.  */
                  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
                  cls = NO_REGS;
                  if (c == '#')
                    do {
                      c = *p++;
                    } while (c != '\0' && c != ',');
                  if (c == '\0')
                    break;
                  continue;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%': case '?': case '!':
                case '0': case '1': case '2': case '3': case '4': case 'm':
                case '<': case '>': case 'V': case 'o': case '&': case 'E':
                case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
                case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
                case 'P':
#ifdef EXTRA_CONSTRAINT
                case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
                  break;

                case 'p':
                  cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
                  break;

                case 'g':
                case 'r':
                  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
                  break;

                default:
                  cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];

                }
            }
        }
      /* Those of the registers which are clobbered, but allowed by the
         constraints, must be usable as reload registers.  So clear them
         out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (TEST_HARD_REG_BIT (allowed, i))
          {
            CLEAR_REGNO_REG_SET (chain->live_before, i);
            CLEAR_REGNO_REG_SET (chain->live_after, i);
          }
    }

#endif
}

03acd8f8
BS
1341\f
1342/* Walk the chain of insns, and determine for each whether it needs reloads
1343 and/or eliminations. Build the corresponding insns_need_reload list, and
1344 set something_needs_elimination as appropriate. */
1345static void
7609e720 1346calculate_needs_all_insns (global)
1e5bd841
BS
1347 int global;
1348{
7609e720 1349 struct insn_chain **pprev_reload = &insns_need_reload;
03acd8f8 1350 struct insn_chain **pchain;
1e5bd841 1351
03acd8f8
BS
1352 something_needs_elimination = 0;
1353
1354 for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
1e5bd841 1355 {
03acd8f8
BS
1356 rtx insn;
1357 struct insn_chain *chain;
1358
1359 chain = *pchain;
1360 insn = chain->insn;
1e5bd841 1361
03acd8f8
BS
1362 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1363 include REG_LABEL), we need to see what effects this has on the
1364 known offsets at labels. */
1e5bd841
BS
1365
1366 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1367 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1368 && REG_NOTES (insn) != 0))
1369 set_label_offsets (insn, insn, 0);
1370
1371 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1372 {
1373 rtx old_body = PATTERN (insn);
1374 int old_code = INSN_CODE (insn);
1375 rtx old_notes = REG_NOTES (insn);
1376 int did_elimination = 0;
cb2afeb3 1377 int operands_changed = 0;
2b49ee39
R
1378 rtx set = single_set (insn);
1379
1380 /* Skip insns that only set an equivalence. */
1381 if (set && GET_CODE (SET_DEST (set)) == REG
1382 && reg_renumber[REGNO (SET_DEST (set))] < 0
1383 && reg_equiv_constant[REGNO (SET_DEST (set))])
a8edca88
JW
1384 {
1385 /* Must clear out the shortcuts, in case they were set last
1386 time through. */
1387 chain->need_elim = 0;
1388 chain->need_reload = 0;
1389 chain->need_operand_change = 0;
1390 continue;
1391 }
1e5bd841 1392
1e5bd841 1393 /* If needed, eliminate any eliminable registers. */
2b49ee39 1394 if (num_eliminable || num_eliminable_invariants)
1e5bd841
BS
1395 did_elimination = eliminate_regs_in_insn (insn, 0);
1396
1397 /* Analyze the instruction. */
cb2afeb3
R
1398 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1399 global, spill_reg_order);
1400
1401 /* If a no-op set needs more than one reload, this is likely
1402 to be something that needs input address reloads. We
1403 can't get rid of this cleanly later, and it is of no use
1404 anyway, so discard it now.
1405 We only do this when expensive_optimizations is enabled,
1406 since this complements reload inheritance / output
1407 reload deletion, and it can make debugging harder. */
1408 if (flag_expensive_optimizations && n_reloads > 1)
1409 {
1410 rtx set = single_set (insn);
1411 if (set
1412 && SET_SRC (set) == SET_DEST (set)
1413 && GET_CODE (SET_SRC (set)) == REG
1414 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1415 {
1416 PUT_CODE (insn, NOTE);
1417 NOTE_SOURCE_FILE (insn) = 0;
1418 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1419 continue;
1420 }
1421 }
1422 if (num_eliminable)
1423 update_eliminable_offsets ();
1e5bd841
BS
1424
1425 /* Remember for later shortcuts which insns had any reloads or
7609e720
BS
1426 register eliminations. */
1427 chain->need_elim = did_elimination;
03acd8f8
BS
1428 chain->need_reload = n_reloads > 0;
1429 chain->need_operand_change = operands_changed;
1e5bd841
BS
1430
1431 /* Discard any register replacements done. */
1432 if (did_elimination)
1433 {
1434 obstack_free (&reload_obstack, reload_firstobj);
1435 PATTERN (insn) = old_body;
1436 INSN_CODE (insn) = old_code;
1437 REG_NOTES (insn) = old_notes;
1438 something_needs_elimination = 1;
1439 }
1440
cb2afeb3
R
1441 something_needs_operands_changed |= operands_changed;
1442
437a710d 1443 if (n_reloads != 0)
7609e720
BS
1444 {
1445 *pprev_reload = chain;
1446 pprev_reload = &chain->next_need_reload;
03acd8f8
BS
1447
1448 calculate_needs (chain);
7609e720 1449 }
1e5bd841 1450 }
1e5bd841 1451 }
7609e720 1452 *pprev_reload = 0;
1e5bd841
BS
1453}
1454
03acd8f8
BS
1455/* Compute the maximum number of additional registers needed by one
 1456 instruction, given by CHAIN. Collect information separately for each class of regs.
1457
1458 To compute the number of reload registers of each class needed for an
1459 insn, we must simulate what choose_reload_regs can do. We do this by
1460 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1461 reloads are used in both. The input part uses those reloads,
1462 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1463 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1464 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1465
1466 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1467 which are live for the entire output portion, and the maximum of all the
1468 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1e5bd841
BS
1469
1470 The total number of registers needed is the maximum of the
1471 inputs and outputs. */
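/* A worked example of the merging done below (the counts are illustrative):
   suppose that for one class an insn has one RELOAD_FOR_INPUT reload, one
   RELOAD_FOR_INPUT_ADDRESS reload for operand 0, and one RELOAD_FOR_OUTPUT
   reload, all of size 1.  Then in_max is 1, the input part needs
   MAX (1 + 0 + 0, 1 + 1) = 2 registers, the output part needs 1, and
   insn_needs.other, which is copied into chain->need, ends up as
   MAX (2, 1) = 2.  */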
1472
03acd8f8
BS
1473static void
1474calculate_needs (chain)
7609e720 1475 struct insn_chain *chain;
1e5bd841 1476{
1e5bd841
BS
1477 int i;
1478
1e5bd841
BS
1479 /* Each `struct needs' corresponds to one RELOAD_... type. */
1480 struct {
1481 struct needs other;
1482 struct needs input;
1483 struct needs output;
1484 struct needs insn;
1485 struct needs other_addr;
1486 struct needs op_addr;
1487 struct needs op_addr_reload;
1488 struct needs in_addr[MAX_RECOG_OPERANDS];
1489 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1490 struct needs out_addr[MAX_RECOG_OPERANDS];
1491 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1492 } insn_needs;
1493
03acd8f8
BS
1494 bzero ((char *) chain->group_size, sizeof chain->group_size);
1495 for (i = 0; i < N_REG_CLASSES; i++)
1496 chain->group_mode[i] = VOIDmode;
1e5bd841
BS
1497 bzero ((char *) &insn_needs, sizeof insn_needs);
1498
1499 /* Count each reload once in every class
1500 containing the reload's own class. */
1501
1502 for (i = 0; i < n_reloads; i++)
1503 {
1504 register enum reg_class *p;
1505 enum reg_class class = reload_reg_class[i];
1506 int size;
1507 enum machine_mode mode;
1508 struct needs *this_needs;
1509
1510 /* Don't count the dummy reloads, for which one of the
1511 regs mentioned in the insn can be used for reloading.
1512 Don't count optional reloads.
1513 Don't count reloads that got combined with others. */
1514 if (reload_reg_rtx[i] != 0
1515 || reload_optional[i] != 0
1516 || (reload_out[i] == 0 && reload_in[i] == 0
1517 && ! reload_secondary_p[i]))
1518 continue;
1519
1e5bd841
BS
1520 mode = reload_inmode[i];
1521 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1522 mode = reload_outmode[i];
1523 size = CLASS_MAX_NREGS (class, mode);
1524
1525 /* Decide which time-of-use to count this reload for. */
1526 switch (reload_when_needed[i])
1527 {
1528 case RELOAD_OTHER:
1529 this_needs = &insn_needs.other;
1530 break;
1531 case RELOAD_FOR_INPUT:
1532 this_needs = &insn_needs.input;
1533 break;
1534 case RELOAD_FOR_OUTPUT:
1535 this_needs = &insn_needs.output;
1536 break;
1537 case RELOAD_FOR_INSN:
1538 this_needs = &insn_needs.insn;
1539 break;
1540 case RELOAD_FOR_OTHER_ADDRESS:
1541 this_needs = &insn_needs.other_addr;
1542 break;
1543 case RELOAD_FOR_INPUT_ADDRESS:
1544 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1545 break;
1546 case RELOAD_FOR_INPADDR_ADDRESS:
1547 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1548 break;
1549 case RELOAD_FOR_OUTPUT_ADDRESS:
1550 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1551 break;
1552 case RELOAD_FOR_OUTADDR_ADDRESS:
1553 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1554 break;
1555 case RELOAD_FOR_OPERAND_ADDRESS:
1556 this_needs = &insn_needs.op_addr;
1557 break;
1558 case RELOAD_FOR_OPADDR_ADDR:
1559 this_needs = &insn_needs.op_addr_reload;
1560 break;
973838fd
KG
1561 default:
 1562	  abort ();
1e5bd841
BS
1563 }
1564
1565 if (size > 1)
1566 {
1567 enum machine_mode other_mode, allocate_mode;
1568
1569 /* Count number of groups needed separately from
1570 number of individual regs needed. */
1571 this_needs->groups[(int) class]++;
1572 p = reg_class_superclasses[(int) class];
1573 while (*p != LIM_REG_CLASSES)
1574 this_needs->groups[(int) *p++]++;
1575
1576 /* Record size and mode of a group of this class. */
1577 /* If more than one size group is needed,
1578 make all groups the largest needed size. */
03acd8f8 1579 if (chain->group_size[(int) class] < size)
1e5bd841 1580 {
03acd8f8 1581 other_mode = chain->group_mode[(int) class];
1e5bd841
BS
1582 allocate_mode = mode;
1583
03acd8f8
BS
1584 chain->group_size[(int) class] = size;
1585 chain->group_mode[(int) class] = mode;
1e5bd841
BS
1586 }
1587 else
1588 {
1589 other_mode = mode;
03acd8f8 1590 allocate_mode = chain->group_mode[(int) class];
1e5bd841
BS
1591 }
1592
1593 /* Crash if two dissimilar machine modes both need
1594 groups of consecutive regs of the same class. */
1595
1596 if (other_mode != VOIDmode && other_mode != allocate_mode
1597 && ! modes_equiv_for_class_p (allocate_mode,
1598 other_mode, class))
1599 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
03acd8f8 1600 chain->insn);
1e5bd841
BS
1601 }
1602 else if (size == 1)
1603 {
e51712db 1604 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
1e5bd841
BS
1605 p = reg_class_superclasses[(int) class];
1606 while (*p != LIM_REG_CLASSES)
e51712db 1607 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
1e5bd841
BS
1608 }
1609 else
1610 abort ();
1611 }
1612
1613 /* All reloads have been counted for this insn;
1614 now merge the various times of use.
1615 This sets insn_needs, etc., to the maximum total number
1616 of registers needed at any point in this insn. */
1617
1618 for (i = 0; i < N_REG_CLASSES; i++)
1619 {
1620 int j, in_max, out_max;
1621
1622 /* Compute normal and nongroup needs. */
1623 for (j = 0; j <= 1; j++)
1624 {
1625 int k;
1626 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1627 {
1628 in_max = MAX (in_max,
1629 (insn_needs.in_addr[k].regs[j][i]
1630 + insn_needs.in_addr_addr[k].regs[j][i]));
1631 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1632 out_max = MAX (out_max,
1633 insn_needs.out_addr_addr[k].regs[j][i]);
1634 }
1635
1636 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1637 and operand addresses but not things used to reload
1638 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1639 don't conflict with things needed to reload inputs or
1640 outputs. */
1641
1642 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1643 insn_needs.op_addr_reload.regs[j][i]),
1644 in_max);
1645
1646 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1647
1648 insn_needs.input.regs[j][i]
1649 = MAX (insn_needs.input.regs[j][i]
1650 + insn_needs.op_addr.regs[j][i]
1651 + insn_needs.insn.regs[j][i],
1652 in_max + insn_needs.input.regs[j][i]);
1653
1654 insn_needs.output.regs[j][i] += out_max;
1655 insn_needs.other.regs[j][i]
1656 += MAX (MAX (insn_needs.input.regs[j][i],
1657 insn_needs.output.regs[j][i]),
1658 insn_needs.other_addr.regs[j][i]);
1659
1660 }
1661
1662 /* Now compute group needs. */
1663 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1664 {
1665 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1666 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1667 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1668 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1669 }
1670
1671 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1672 insn_needs.op_addr_reload.groups[i]),
1673 in_max);
1674 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1675
1676 insn_needs.input.groups[i]
1677 = MAX (insn_needs.input.groups[i]
1678 + insn_needs.op_addr.groups[i]
1679 + insn_needs.insn.groups[i],
1680 in_max + insn_needs.input.groups[i]);
1681
1682 insn_needs.output.groups[i] += out_max;
1683 insn_needs.other.groups[i]
1684 += MAX (MAX (insn_needs.input.groups[i],
1685 insn_needs.output.groups[i]),
1686 insn_needs.other_addr.groups[i]);
1687 }
1688
7609e720
BS
1689 /* Record the needs for later. */
1690 chain->need = insn_needs.other;
1e5bd841 1691}
03acd8f8 1692\f
1e5bd841
BS
1693/* Find a group of exactly 2 registers.
1694
1695 First try to fill out the group by spilling a single register which
1696 would allow completion of the group.
1697
1698 Then try to create a new group from a pair of registers, neither of
 1699 which is explicitly used.
1700
1701 Then try to create a group from any pair of registers. */
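/* For example: if the class needs one two-register group and register 7 is
   already a spill reg of this class, spilling register 6 or register 8
   completes a group (6,7) or (7,8), provided the low register of the pair
   is HARD_REGNO_MODE_OK for the group mode and the existing spill reg is
   not already counted for another group or for the nongroup needs.  */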
03acd8f8
BS
1702
1703static void
1704find_tworeg_group (chain, class, dumpfile)
1705 struct insn_chain *chain;
1e5bd841
BS
1706 int class;
1707 FILE *dumpfile;
1708{
1709 int i;
1710 /* First, look for a register that will complete a group. */
1711 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1712 {
1713 int j, other;
1714
1715 j = potential_reload_regs[i];
1716 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1717 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1719 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
03acd8f8
BS
1720 && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
1721 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1e5bd841
BS
1722 /* We don't want one part of another group.
1723 We could get "two groups" that overlap! */
03acd8f8 1724 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
1e5bd841
BS
1725 || (j < FIRST_PSEUDO_REGISTER - 1
1726 && (other = j + 1, spill_reg_order[other] >= 0)
1727 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1728 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
03acd8f8
BS
1729 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1730 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1731 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
1e5bd841
BS
1732 {
1733 register enum reg_class *p;
1734
1735 /* We have found one that will complete a group,
1736 so count off one group as provided. */
03acd8f8 1737 chain->need.groups[class]--;
1e5bd841
BS
1738 p = reg_class_superclasses[class];
1739 while (*p != LIM_REG_CLASSES)
1740 {
03acd8f8
BS
1741 if (chain->group_size [(int) *p] <= chain->group_size [class])
1742 chain->need.groups[(int) *p]--;
1e5bd841
BS
1743 p++;
1744 }
1745
1746 /* Indicate both these regs are part of a group. */
03acd8f8
BS
1747 SET_HARD_REG_BIT (chain->counted_for_groups, j);
1748 SET_HARD_REG_BIT (chain->counted_for_groups, other);
1e5bd841
BS
1749 break;
1750 }
1751 }
1752 /* We can't complete a group, so start one. */
1e5bd841
BS
1753 if (i == FIRST_PSEUDO_REGISTER)
1754 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1755 {
1756 int j, k;
1757 j = potential_reload_regs[i];
1758 /* Verify that J+1 is a potential reload reg. */
1759 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1760 if (potential_reload_regs[k] == j + 1)
1761 break;
1762 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1763 && k < FIRST_PSEUDO_REGISTER
1764 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1765 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1766 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
03acd8f8
BS
1767 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1768 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
1e5bd841
BS
1769 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1770 break;
1771 }
1772
1773 /* I should be the index in potential_reload_regs
1774 of the new reload reg we have found. */
1775
03acd8f8 1776 new_spill_reg (chain, i, class, 0, dumpfile);
1e5bd841
BS
1777}
1778
1779/* Find a group of more than 2 registers.
1780 Look for a sufficient sequence of unspilled registers, and spill them all
1781 at once. */
03acd8f8
BS
1782
1783static void
1784find_group (chain, class, dumpfile)
1785 struct insn_chain *chain;
1e5bd841
BS
1786 int class;
1787 FILE *dumpfile;
1788{
1e5bd841
BS
1789 int i;
1790
1791 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1792 {
03acd8f8 1793 int j = potential_reload_regs[i];
1e5bd841 1794
1e5bd841 1795 if (j >= 0
03acd8f8
BS
1796 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
1797 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
1e5bd841 1798 {
03acd8f8 1799 int k;
1e5bd841 1800 /* Check each reg in the sequence. */
03acd8f8 1801 for (k = 0; k < chain->group_size[class]; k++)
1e5bd841
BS
1802 if (! (spill_reg_order[j + k] < 0
1803 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1804 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1805 break;
1806 /* We got a full sequence, so spill them all. */
03acd8f8 1807 if (k == chain->group_size[class])
1e5bd841
BS
1808 {
1809 register enum reg_class *p;
03acd8f8 1810 for (k = 0; k < chain->group_size[class]; k++)
1e5bd841
BS
1811 {
1812 int idx;
03acd8f8 1813 SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
1e5bd841
BS
1814 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1815 if (potential_reload_regs[idx] == j + k)
1816 break;
03acd8f8 1817 new_spill_reg (chain, idx, class, 0, dumpfile);
1e5bd841
BS
1818 }
1819
1820 /* We have found one that will complete a group,
1821 so count off one group as provided. */
03acd8f8 1822 chain->need.groups[class]--;
1e5bd841
BS
1823 p = reg_class_superclasses[class];
1824 while (*p != LIM_REG_CLASSES)
1825 {
03acd8f8
BS
1826 if (chain->group_size [(int) *p]
1827 <= chain->group_size [class])
1828 chain->need.groups[(int) *p]--;
1e5bd841
BS
1829 p++;
1830 }
03acd8f8 1831 return;
1e5bd841
BS
1832 }
1833 }
1834 }
1835 /* There are no groups left. */
03acd8f8 1836 spill_failure (chain->insn);
1e5bd841 1837 failure = 1;
1e5bd841
BS
1838}
1839
03acd8f8
BS
1840/* If pseudo REG conflicts with one of our reload registers, mark it as
1841 spilled. */
1842static void
1843maybe_mark_pseudo_spilled (reg)
1844 int reg;
1845{
1846 int i;
1847 int r = reg_renumber[reg];
1848 int nregs;
1849
1850 if (r < 0)
1851 abort ();
1852 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1853 for (i = 0; i < n_spills; i++)
1854 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1855 {
1856 SET_REGNO_REG_SET (spilled_pseudos, reg);
1857 return;
1858 }
1859}
1860
1861/* Find more reload regs to satisfy the remaining need of an insn, which
1862 is given by CHAIN.
1e5bd841
BS
1863 Do it by ascending class number, since otherwise a reg
1864 might be spilled for a big class and might fail to count
1865 for a smaller class even though it belongs to that class.
1866
1867 Count spilled regs in `spills', and add entries to
1868 `spill_regs' and `spill_reg_order'.
1869
1870 ??? Note there is a problem here.
1871 When there is a need for a group in a high-numbered class,
1872 and also need for non-group regs that come from a lower class,
1873 the non-group regs are chosen first. If there aren't many regs,
1874 they might leave no room for a group.
1875
1876 This was happening on the 386. To fix it, we added the code
1877 that calls possible_group_p, so that the lower class won't
1878 break up the last possible group.
1879
1880 Really fixing the problem would require changes above
1881 in counting the regs already spilled, and in choose_reload_regs.
1882 It might be hard to avoid introducing bugs there. */
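/* As a concrete (hypothetical) illustration of the problem above: with only
   four allocatable registers left, a need for one single reg in a small
   class plus one two-register group in a larger class can fail if the
   single reg is taken from the middle of the only remaining adjacent pair;
   possible_group_p is the check below that steers the choice of the single
   reg away from breaking that last pair.  */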
1883
03acd8f8
BS
1884static void
1885find_reload_regs (chain, dumpfile)
1886 struct insn_chain *chain;
1e5bd841
BS
1887 FILE *dumpfile;
1888{
03acd8f8
BS
1889 int i, class;
1890 short *group_needs = chain->need.groups;
1891 short *simple_needs = chain->need.regs[0];
1892 short *nongroup_needs = chain->need.regs[1];
1893
1894 if (dumpfile)
1895 fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1896
1897 /* Compute the order of preference for hard registers to spill.
1898 Store them by decreasing preference in potential_reload_regs. */
1899
1900 order_regs_for_reload (chain);
1901
1902 /* So far, no hard regs have been spilled. */
1903 n_spills = 0;
1904 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1905 spill_reg_order[i] = -1;
1e5bd841 1906
03acd8f8
BS
1907 CLEAR_HARD_REG_SET (chain->used_spill_regs);
1908 CLEAR_HARD_REG_SET (chain->counted_for_groups);
1909 CLEAR_HARD_REG_SET (chain->counted_for_nongroups);
1e5bd841
BS
1910
1911 for (class = 0; class < N_REG_CLASSES; class++)
1912 {
1913 /* First get the groups of registers.
1914 If we got single registers first, we might fragment
1915 possible groups. */
03acd8f8 1916 while (group_needs[class] > 0)
1e5bd841
BS
1917 {
1918 /* If any single spilled regs happen to form groups,
1919 count them now. Maybe we don't really need
1920 to spill another group. */
03acd8f8 1921 count_possible_groups (chain, class);
1e5bd841 1922
03acd8f8 1923 if (group_needs[class] <= 0)
1e5bd841
BS
1924 break;
1925
03acd8f8 1926 /* Groups of size 2, the only groups used on most machines,
1e5bd841 1927 are treated specially. */
03acd8f8
BS
1928 if (chain->group_size[class] == 2)
1929 find_tworeg_group (chain, class, dumpfile);
1e5bd841 1930 else
03acd8f8 1931 find_group (chain, class, dumpfile);
1e5bd841 1932 if (failure)
03acd8f8 1933 return;
1e5bd841
BS
1934 }
1935
1936 /* Now similarly satisfy all need for single registers. */
1937
03acd8f8 1938 while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
1e5bd841 1939 {
1e5bd841
BS
1940 /* If we spilled enough regs, but they weren't counted
1941 against the non-group need, see if we can count them now.
1942 If so, we can avoid some actual spilling. */
03acd8f8 1943 if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
1e5bd841
BS
1944 for (i = 0; i < n_spills; i++)
1945 {
1946 int regno = spill_regs[i];
1947 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
03acd8f8
BS
1948 && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
1949 && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
1950 && nongroup_needs[class] > 0)
1951 {
1952 register enum reg_class *p;
1e5bd841 1953
03acd8f8
BS
1954 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
1955 nongroup_needs[class]--;
1956 p = reg_class_superclasses[class];
1957 while (*p != LIM_REG_CLASSES)
1958 nongroup_needs[(int) *p++]--;
1959 }
1e5bd841 1960 }
03acd8f8
BS
1961
1962 if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
1e5bd841
BS
1963 break;
1964
1965 /* Consider the potential reload regs that aren't
1966 yet in use as reload regs, in order of preference.
1967 Find the most preferred one that's in this class. */
1968
1969 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1970 {
1971 int regno = potential_reload_regs[i];
1972 if (regno >= 0
1973 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1974 /* If this reg will not be available for groups,
1975 pick one that does not foreclose possible groups.
1976 This is a kludge, and not very general,
1977 but it should be sufficient to make the 386 work,
1978 and the problem should not occur on machines with
1979 more registers. */
03acd8f8
BS
1980 && (nongroup_needs[class] == 0
1981 || possible_group_p (chain, regno)))
1e5bd841
BS
1982 break;
1983 }
1984
1985 /* If we couldn't get a register, try to get one even if we
1986 might foreclose possible groups. This may cause problems
1987 later, but that's better than aborting now, since it is
1988 possible that we will, in fact, be able to form the needed
1989 group even with this allocation. */
1990
1991 if (i >= FIRST_PSEUDO_REGISTER
03acd8f8 1992 && asm_noperands (chain->insn) < 0)
1e5bd841
BS
1993 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1994 if (potential_reload_regs[i] >= 0
1995 && TEST_HARD_REG_BIT (reg_class_contents[class],
1996 potential_reload_regs[i]))
1997 break;
1998
1999 /* I should be the index in potential_reload_regs
2000 of the new reload reg we have found. */
2001
03acd8f8
BS
2002 new_spill_reg (chain, i, class, 1, dumpfile);
2003 if (failure)
2004 return;
1e5bd841
BS
2005 }
2006 }
05d10675 2007
03acd8f8
BS
 2008  /* We know which hard regs to use; now mark the pseudos that live in them
2009 as needing to be kicked out. */
2010 EXECUTE_IF_SET_IN_REG_SET
2011 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
2012 {
2013 maybe_mark_pseudo_spilled (i);
2014 });
2015 EXECUTE_IF_SET_IN_REG_SET
2016 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
2017 {
2018 maybe_mark_pseudo_spilled (i);
2019 });
2020
2021 IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
1e5bd841
BS
2022}
2023
03acd8f8
BS
2024void
2025dump_needs (chain, dumpfile)
2026 struct insn_chain *chain;
09dd1133
BS
2027 FILE *dumpfile;
2028{
a4ec8d12 2029 static const char * const reg_class_names[] = REG_CLASS_NAMES;
09dd1133 2030 int i;
03acd8f8 2031 struct needs *n = &chain->need;
09dd1133
BS
2032
2033 for (i = 0; i < N_REG_CLASSES; i++)
2034 {
03acd8f8 2035      if (n->regs[0][i] > 0)
09dd1133 2036 fprintf (dumpfile,
03acd8f8
BS
2037 ";; Need %d reg%s of class %s.\n",
 2038		 n->regs[0][i], n->regs[0][i] == 1 ? "" : "s",
2039 reg_class_names[i]);
 2040      if (n->regs[1][i] > 0)
09dd1133 2041 fprintf (dumpfile,
03acd8f8
BS
2042 ";; Need %d nongroup reg%s of class %s.\n",
 2043		 n->regs[1][i], n->regs[1][i] == 1 ? "" : "s",
2044 reg_class_names[i]);
2045 if (n->groups[i] > 0)
09dd1133 2046 fprintf (dumpfile,
03acd8f8
BS
2047 ";; Need %d group%s (%smode) of class %s.\n",
2048 n->groups[i], n->groups[i] == 1 ? "" : "s",
a4ec8d12 2049 GET_MODE_NAME(chain->group_mode[i]),
03acd8f8 2050 reg_class_names[i]);
09dd1133
BS
2051 }
2052}
32131a9c 2053\f
437a710d
BS
2054/* Delete all insns that were inserted by emit_caller_save_insns during
2055 this iteration. */
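/* Note that each deleted insn must be unlinked from two places: from the
   insn stream (also updating BLOCK_HEAD / BLOCK_END if it was the first or
   last insn of its block) and from reload_insn_chain; the freed struct
   insn_chain is put back on unused_insn_chains for reuse.  */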
2056static void
7609e720 2057delete_caller_save_insns ()
437a710d 2058{
7609e720 2059 struct insn_chain *c = reload_insn_chain;
437a710d 2060
7609e720 2061 while (c != 0)
437a710d 2062 {
7609e720 2063 while (c != 0 && c->is_caller_save_insn)
437a710d 2064 {
7609e720
BS
2065 struct insn_chain *next = c->next;
2066 rtx insn = c->insn;
2067
3b413743
RH
2068 if (insn == BLOCK_HEAD (c->block))
2069 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
2070 if (insn == BLOCK_END (c->block))
2071 BLOCK_END (c->block) = PREV_INSN (insn);
7609e720
BS
2072 if (c == reload_insn_chain)
2073 reload_insn_chain = next;
2074
2075 if (NEXT_INSN (insn) != 0)
2076 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2077 if (PREV_INSN (insn) != 0)
2078 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2079
2080 if (next)
2081 next->prev = c->prev;
2082 if (c->prev)
2083 c->prev->next = next;
2084 c->next = unused_insn_chains;
2085 unused_insn_chains = c;
2086 c = next;
437a710d 2087 }
7609e720
BS
2088 if (c != 0)
2089 c = c->next;
437a710d
BS
2090 }
2091}
2092\f
32131a9c
RK
2093/* Nonzero if, after spilling reg REGNO for non-groups,
2094 it will still be possible to find a group if we still need one. */
2095
2096static int
03acd8f8
BS
2097possible_group_p (chain, regno)
2098 struct insn_chain *chain;
32131a9c 2099 int regno;
32131a9c
RK
2100{
2101 int i;
2102 int class = (int) NO_REGS;
2103
2104 for (i = 0; i < (int) N_REG_CLASSES; i++)
03acd8f8 2105 if (chain->need.groups[i] > 0)
32131a9c
RK
2106 {
2107 class = i;
2108 break;
2109 }
2110
2111 if (class == (int) NO_REGS)
2112 return 1;
2113
2114 /* Consider each pair of consecutive registers. */
2115 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2116 {
2117 /* Ignore pairs that include reg REGNO. */
2118 if (i == regno || i + 1 == regno)
2119 continue;
2120
2121 /* Ignore pairs that are outside the class that needs the group.
2122 ??? Here we fail to handle the case where two different classes
2123 independently need groups. But this never happens with our
2124 current machine descriptions. */
2125 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2126 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2127 continue;
2128
2129 /* A pair of consecutive regs we can still spill does the trick. */
2130 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2131 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2132 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2133 return 1;
2134
2135 /* A pair of one already spilled and one we can spill does it
2136 provided the one already spilled is not otherwise reserved. */
2137 if (spill_reg_order[i] < 0
2138 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2139 && spill_reg_order[i + 1] >= 0
03acd8f8
BS
2140 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2141 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
32131a9c
RK
2142 return 1;
2143 if (spill_reg_order[i + 1] < 0
2144 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2145 && spill_reg_order[i] >= 0
03acd8f8
BS
2146 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2147 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
32131a9c
RK
2148 return 1;
2149 }
2150
2151 return 0;
2152}
03acd8f8 2153
066aca28
RK
2154/* Count any groups of CLASS that can be formed from the registers recently
2155 spilled. */
32131a9c
RK
2156
2157static void
03acd8f8
BS
2158count_possible_groups (chain, class)
2159 struct insn_chain *chain;
066aca28 2160 int class;
32131a9c 2161{
066aca28
RK
2162 HARD_REG_SET new;
2163 int i, j;
2164
32131a9c
RK
2165 /* Now find all consecutive groups of spilled registers
2166 and mark each group off against the need for such groups.
2167 But don't count them against ordinary need, yet. */
2168
03acd8f8 2169 if (chain->group_size[class] == 0)
066aca28
RK
2170 return;
2171
2172 CLEAR_HARD_REG_SET (new);
2173
 2174  /* Make a mask of all the regs that are spill regs in class CLASS. */
2175 for (i = 0; i < n_spills; i++)
03acd8f8
BS
2176 {
2177 int regno = spill_regs[i];
2178
2179 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2180 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
2181 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
2182 SET_HARD_REG_BIT (new, regno);
2183 }
066aca28
RK
2184
2185 /* Find each consecutive group of them. */
03acd8f8 2186 for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
066aca28 2187 if (TEST_HARD_REG_BIT (new, i)
03acd8f8
BS
2188 && i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
2189 && HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
32131a9c 2190 {
03acd8f8 2191 for (j = 1; j < chain->group_size[class]; j++)
066aca28
RK
2192 if (! TEST_HARD_REG_BIT (new, i + j))
2193 break;
32131a9c 2194
03acd8f8 2195 if (j == chain->group_size[class])
066aca28
RK
2196 {
2197 /* We found a group. Mark it off against this class's need for
2198 groups, and against each superclass too. */
2199 register enum reg_class *p;
2200
03acd8f8 2201 chain->need.groups[class]--;
066aca28
RK
2202 p = reg_class_superclasses[class];
2203 while (*p != LIM_REG_CLASSES)
d601d5da 2204 {
03acd8f8
BS
2205 if (chain->group_size [(int) *p] <= chain->group_size [class])
2206 chain->need.groups[(int) *p]--;
d601d5da
JW
2207 p++;
2208 }
066aca28
RK
2209
2210 /* Don't count these registers again. */
03acd8f8
BS
2211 for (j = 0; j < chain->group_size[class]; j++)
2212 SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
066aca28
RK
2213 }
2214
2215 /* Skip to the last reg in this group. When i is incremented above,
2216 it will then point to the first reg of the next possible group. */
2217 i += j - 1;
2218 }
32131a9c
RK
2219}
2220\f
2221/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2222 another mode that needs to be reloaded for the same register class CLASS.
2223 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2224 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2225
2226 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2227 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2228 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2229 causes unnecessary failures on machines requiring alignment of register
2230 groups when the two modes are different sizes, because the larger mode has
2231 more strict alignment rules than the smaller mode. */
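/* For example, if some register of CLASS allows ALLOCATE_MODE but not
   OTHER_MODE, the two modes cannot share one group computation: a group
   sized and placed for ALLOCATE_MODE could land on that register and then
   be unusable for the OTHER_MODE reloads, so the caller treats this as a
   fatal inconsistency (see the fatal_insn call in calculate_needs).  */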
2232
2233static int
2234modes_equiv_for_class_p (allocate_mode, other_mode, class)
2235 enum machine_mode allocate_mode, other_mode;
2236 enum reg_class class;
2237{
2238 register int regno;
2239 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2240 {
2241 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2242 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2243 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2244 return 0;
2245 }
2246 return 1;
2247}
03acd8f8 2248\f
5352b11a
RS
2249/* Handle the failure to find a register to spill.
2250 INSN should be one of the insns which needed this particular spill reg. */
2251
2252static void
2253spill_failure (insn)
2254 rtx insn;
2255{
2256 if (asm_noperands (PATTERN (insn)) >= 0)
2257 error_for_asm (insn, "`asm' needs too many reloads");
2258 else
a89b2cc4 2259 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2260}
2261
03acd8f8
BS
2262/* Add a new register to the tables of available spill-registers.
 2263 CHAIN describes the insn for which the register will be used; we decrease the
2264 needs of that insn.
32131a9c
RK
2265 I is the index of this register in potential_reload_regs.
2266 CLASS is the regclass whose need is being satisfied.
03acd8f8
BS
2267 NONGROUP is 0 if this register is part of a group.
2268 DUMPFILE is the same as the one that `reload' got. */
32131a9c 2269
03acd8f8
BS
2270static void
2271new_spill_reg (chain, i, class, nongroup, dumpfile)
2272 struct insn_chain *chain;
32131a9c
RK
2273 int i;
2274 int class;
03acd8f8 2275 int nongroup;
32131a9c
RK
2276 FILE *dumpfile;
2277{
2278 register enum reg_class *p;
32131a9c
RK
2279 int regno = potential_reload_regs[i];
2280
2281 if (i >= FIRST_PSEUDO_REGISTER)
03acd8f8
BS
2282 {
2283 spill_failure (chain->insn);
2284 failure = 1;
2285 return;
2286 }
32131a9c 2287
03acd8f8 2288 if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
da275344 2289 {
a4ec8d12 2290 static const char * const reg_class_names[] = REG_CLASS_NAMES;
03acd8f8
BS
2291
2292 if (asm_noperands (PATTERN (chain->insn)) < 0)
2293 {
05d10675
BS
2294 /* The error message is still correct - we know only that it wasn't
2295 an asm statement that caused the problem, but one of the global
2296 registers declared by the users might have screwed us. */
03acd8f8
BS
2297 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2298 regno, reg_names[regno], reg_class_names[class]);
2299 error ("This may be due to a compiler bug or to impossible asm");
2300 error ("statements or clauses.");
2301 fatal_insn ("This is the instruction:", chain->insn);
2302 }
2303 error_for_asm (chain->insn, "Invalid `asm' statement:");
2304 error_for_asm (chain->insn,
2305 "fixed or forbidden register %d (%s) was spilled for class %s.",
2306 regno, reg_names[regno], reg_class_names[class]);
2307 failure = 1;
2308 return;
da275344 2309 }
32131a9c
RK
2310
2311 /* Make reg REGNO an additional reload reg. */
2312
2313 potential_reload_regs[i] = -1;
2314 spill_regs[n_spills] = regno;
2315 spill_reg_order[regno] = n_spills;
2316 if (dumpfile)
03acd8f8
BS
2317 fprintf (dumpfile, "Spilling reg %d.\n", regno);
2318 SET_HARD_REG_BIT (chain->used_spill_regs, regno);
32131a9c
RK
2319
2320 /* Clear off the needs we just satisfied. */
2321
03acd8f8 2322 chain->need.regs[0][class]--;
32131a9c
RK
2323 p = reg_class_superclasses[class];
2324 while (*p != LIM_REG_CLASSES)
03acd8f8 2325 chain->need.regs[0][(int) *p++]--;
32131a9c 2326
03acd8f8 2327 if (nongroup && chain->need.regs[1][class] > 0)
32131a9c 2328 {
03acd8f8
BS
2329 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
2330 chain->need.regs[1][class]--;
32131a9c
RK
2331 p = reg_class_superclasses[class];
2332 while (*p != LIM_REG_CLASSES)
03acd8f8 2333 chain->need.regs[1][(int) *p++]--;
32131a9c
RK
2334 }
2335
32131a9c 2336 n_spills++;
32131a9c
RK
2337}
2338\f
2339/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2340 data that is dead in INSN. */
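/* For example, if INSN is (set (mem B) (reg 5)) and is no longer needed,
   and the previous real insn is (set (reg 5) (mem A)) where reg 5 dies in
   INSN and (mem A) has no side effects, the previous insn is deleted
   recursively as well, since its only purpose was to load the dead value.
   Both are turned into NOTE_INSN_DELETED notes.  (A and B are just
   placeholder addresses for the illustration.)  */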
2341
2342static void
2343delete_dead_insn (insn)
2344 rtx insn;
2345{
2346 rtx prev = prev_real_insn (insn);
2347 rtx prev_dest;
2348
2349 /* If the previous insn sets a register that dies in our insn, delete it
2350 too. */
2351 if (prev && GET_CODE (PATTERN (prev)) == SET
2352 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2353 && reg_mentioned_p (prev_dest, PATTERN (insn))
b294ca38
R
2354 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2355 && ! side_effects_p (SET_SRC (PATTERN (prev))))
32131a9c
RK
2356 delete_dead_insn (prev);
2357
2358 PUT_CODE (insn, NOTE);
2359 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2360 NOTE_SOURCE_FILE (insn) = 0;
2361}
2362
2363/* Modify the home of pseudo-reg I.
2364 The new home is present in reg_renumber[I].
2365
2366 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2367 or it may be -1, meaning there is none or it is not relevant.
2368 This is used so that all pseudos spilled from a given hard reg
2369 can share one stack slot. */
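/* For example (sizes are illustrative): if pseudo 100 (4 bytes) and pseudo
   101 (8 bytes) are both spilled from hard reg 3, the first call creates
   spill_stack_slot[3] as a 4-byte slot; the second call finds that slot too
   small and allocates a bigger one, which replaces spill_stack_slot[3] so
   that later pseudos spilled from reg 3 can share the 8-byte slot.  */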
2370
2371static void
2372alter_reg (i, from_reg)
2373 register int i;
2374 int from_reg;
2375{
2376 /* When outputting an inline function, this can happen
2377 for a reg that isn't actually used. */
2378 if (regno_reg_rtx[i] == 0)
2379 return;
2380
2381 /* If the reg got changed to a MEM at rtl-generation time,
2382 ignore it. */
2383 if (GET_CODE (regno_reg_rtx[i]) != REG)
2384 return;
2385
2386 /* Modify the reg-rtx to contain the new hard reg
2387 number or else to contain its pseudo reg number. */
2388 REGNO (regno_reg_rtx[i])
2389 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2390
2391 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2392 allocate a stack slot for it. */
2393
2394 if (reg_renumber[i] < 0
b1f21e0a 2395 && REG_N_REFS (i) > 0
32131a9c
RK
2396 && reg_equiv_constant[i] == 0
2397 && reg_equiv_memory_loc[i] == 0)
2398 {
2399 register rtx x;
2400 int inherent_size = PSEUDO_REGNO_BYTES (i);
2401 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2402 int adjust = 0;
2403
2404 /* Each pseudo reg has an inherent size which comes from its own mode,
2405 and a total size which provides room for paradoxical subregs
2406 which refer to the pseudo reg in wider modes.
2407
2408 We can use a slot already allocated if it provides both
2409 enough inherent space and enough total space.
2410 Otherwise, we allocate a new slot, making sure that it has no less
 2411	 inherent space, and no less total space, than the previous slot. */
2412 if (from_reg == -1)
2413 {
2414 /* No known place to spill from => no slot to reuse. */
cabcf079
ILT
2415 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2416 inherent_size == total_size ? 0 : -1);
f76b9db2 2417 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
2418 /* Cancel the big-endian correction done in assign_stack_local.
2419 Get the address of the beginning of the slot.
2420 This is so we can do a big-endian correction unconditionally
2421 below. */
2422 adjust = inherent_size - total_size;
2423
2424 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2425 }
2426 /* Reuse a stack slot if possible. */
2427 else if (spill_stack_slot[from_reg] != 0
2428 && spill_stack_slot_width[from_reg] >= total_size
2429 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2430 >= inherent_size))
2431 x = spill_stack_slot[from_reg];
2432 /* Allocate a bigger slot. */
2433 else
2434 {
2435 /* Compute maximum size needed, both for inherent size
2436 and for total size. */
2437 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 2438 rtx stack_slot;
32131a9c
RK
2439 if (spill_stack_slot[from_reg])
2440 {
2441 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2442 > inherent_size)
2443 mode = GET_MODE (spill_stack_slot[from_reg]);
2444 if (spill_stack_slot_width[from_reg] > total_size)
2445 total_size = spill_stack_slot_width[from_reg];
2446 }
2447 /* Make a slot with that size. */
cabcf079
ILT
2448 x = assign_stack_local (mode, total_size,
2449 inherent_size == total_size ? 0 : -1);
4f2d3674 2450 stack_slot = x;
f76b9db2
ILT
2451 if (BYTES_BIG_ENDIAN)
2452 {
2453 /* Cancel the big-endian correction done in assign_stack_local.
2454 Get the address of the beginning of the slot.
2455 This is so we can do a big-endian correction unconditionally
2456 below. */
2457 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2458 if (adjust)
38a448ca
RH
2459 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2460 * BITS_PER_UNIT,
2461 MODE_INT, 1),
05d10675 2462 plus_constant (XEXP (x, 0), adjust));
f76b9db2 2463 }
4f2d3674 2464 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2465 spill_stack_slot_width[from_reg] = total_size;
2466 }
2467
32131a9c
RK
2468 /* On a big endian machine, the "address" of the slot
2469 is the address of the low part that fits its inherent mode. */
f76b9db2 2470 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2471 adjust += (total_size - inherent_size);
32131a9c
RK
2472
2473 /* If we have any adjustment to make, or if the stack slot is the
2474 wrong mode, make a new stack slot. */
2475 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2476 {
38a448ca 2477 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
05d10675 2478 plus_constant (XEXP (x, 0), adjust));
9ec36da5
JL
2479
2480 /* If this was shared among registers, must ensure we never
2481 set it readonly since that can cause scheduling
 2482	     problems. Note we would only have it set in this
 2483	     adjustment case in any event, since the code above doesn't set it. */
2484
2485 if (from_reg == -1)
2486 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2487 }
2488
2489 /* Save the stack slot for later. */
2490 reg_equiv_memory_loc[i] = x;
2491 }
2492}
2493
2494/* Mark the slots in regs_ever_live for the hard regs
2495 used by pseudo-reg number REGNO. */
2496
2497void
2498mark_home_live (regno)
2499 int regno;
2500{
2501 register int i, lim;
2502 i = reg_renumber[regno];
2503 if (i < 0)
2504 return;
2505 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2506 while (i < lim)
2507 regs_ever_live[i++] = 1;
2508}
2509\f
2510/* This function handles the tracking of elimination offsets around branches.
2511
2512 X is a piece of RTL being scanned.
2513
2514 INSN is the insn that it came from, if any.
2515
2516 INITIAL_P is non-zero if we are to set the offset to be the initial
2517 offset and zero if we are setting the offset of the label to be the
2518 current offset. */
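/* For example, if the code between a jump and its target label pushes four
   bytes, the stack-pointer elimination offset differs between the two
   points; when a label is reached with two different recorded offsets, the
   disagreeing elimination is disabled by clearing its can_eliminate flag
   (see the CODE_LABEL case below).  */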
2519
2520static void
2521set_label_offsets (x, insn, initial_p)
2522 rtx x;
2523 rtx insn;
2524 int initial_p;
2525{
2526 enum rtx_code code = GET_CODE (x);
2527 rtx tem;
e51712db 2528 unsigned int i;
32131a9c
RK
2529 struct elim_table *p;
2530
2531 switch (code)
2532 {
2533 case LABEL_REF:
8be386d9
RS
2534 if (LABEL_REF_NONLOCAL_P (x))
2535 return;
2536
32131a9c
RK
2537 x = XEXP (x, 0);
2538
0f41302f 2539 /* ... fall through ... */
32131a9c
RK
2540
2541 case CODE_LABEL:
2542 /* If we know nothing about this label, set the desired offsets. Note
2543 that this sets the offset at a label to be the offset before a label
2544 if we don't know anything about the label. This is not correct for
2545 the label after a BARRIER, but is the best guess we can make. If
2546 we guessed wrong, we will suppress an elimination that might have
2547 been possible had we been able to guess correctly. */
2548
2549 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2550 {
2551 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2552 offsets_at[CODE_LABEL_NUMBER (x)][i]
2553 = (initial_p ? reg_eliminate[i].initial_offset
2554 : reg_eliminate[i].offset);
2555 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2556 }
2557
2558 /* Otherwise, if this is the definition of a label and it is
d45cf215 2559 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2560 that label. */
2561
2562 else if (x == insn
2563 && (tem = prev_nonnote_insn (insn)) != 0
2564 && GET_CODE (tem) == BARRIER)
1f3b1e1a 2565 set_offsets_for_label (insn);
32131a9c
RK
2566 else
2567 /* If neither of the above cases is true, compare each offset
2568 with those previously recorded and suppress any eliminations
2569 where the offsets disagree. */
a8fdc208 2570
32131a9c
RK
2571 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2572 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2573 != (initial_p ? reg_eliminate[i].initial_offset
2574 : reg_eliminate[i].offset))
2575 reg_eliminate[i].can_eliminate = 0;
2576
2577 return;
2578
2579 case JUMP_INSN:
2580 set_label_offsets (PATTERN (insn), insn, initial_p);
2581
0f41302f 2582 /* ... fall through ... */
32131a9c
RK
2583
2584 case INSN:
2585 case CALL_INSN:
2586 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2587 and hence must have all eliminations at their initial offsets. */
2588 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2589 if (REG_NOTE_KIND (tem) == REG_LABEL)
2590 set_label_offsets (XEXP (tem, 0), insn, 1);
2591 return;
2592
2593 case ADDR_VEC:
2594 case ADDR_DIFF_VEC:
2595 /* Each of the labels in the address vector must be at their initial
38e01259 2596 offsets. We want the first field for ADDR_VEC and the second
32131a9c
RK
2597 field for ADDR_DIFF_VEC. */
2598
e51712db 2599 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
32131a9c
RK
2600 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2601 insn, initial_p);
2602 return;
2603
2604 case SET:
2605 /* We only care about setting PC. If the source is not RETURN,
2606 IF_THEN_ELSE, or a label, disable any eliminations not at
2607 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2608 isn't one of those possibilities. For branches to a label,
2609 call ourselves recursively.
2610
2611 Note that this can disable elimination unnecessarily when we have
2612 a non-local goto since it will look like a non-constant jump to
2613 someplace in the current function. This isn't a significant
2614 problem since such jumps will normally be when all elimination
2615 pairs are back to their initial offsets. */
2616
2617 if (SET_DEST (x) != pc_rtx)
2618 return;
2619
2620 switch (GET_CODE (SET_SRC (x)))
2621 {
2622 case PC:
2623 case RETURN:
2624 return;
2625
2626 case LABEL_REF:
2627 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2628 return;
2629
2630 case IF_THEN_ELSE:
2631 tem = XEXP (SET_SRC (x), 1);
2632 if (GET_CODE (tem) == LABEL_REF)
2633 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2634 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2635 break;
2636
2637 tem = XEXP (SET_SRC (x), 2);
2638 if (GET_CODE (tem) == LABEL_REF)
2639 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2640 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2641 break;
2642 return;
e9a25f70
JL
2643
2644 default:
2645 break;
32131a9c
RK
2646 }
2647
2648 /* If we reach here, all eliminations must be at their initial
2649 offset because we are doing a jump to a variable address. */
2650 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2651 if (p->offset != p->initial_offset)
2652 p->can_eliminate = 0;
e9a25f70 2653 break;
05d10675 2654
e9a25f70
JL
2655 default:
2656 break;
32131a9c
RK
2657 }
2658}
2659\f
2660/* Used for communication between the next two functions to properly share
2661 the vector for an ASM_OPERANDS. */
2662
2663static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2664
a8fdc208 2665/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2666 replacement (such as sp), plus an offset.
2667
2668 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2669 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2670 MEM, we are allowed to replace a sum of a register and the constant zero
2671 with the register, which we cannot do outside a MEM. In addition, we need
2672 to record the fact that a register is referenced outside a MEM.
2673
ff32812a 2674 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2675 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2676 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
38e01259 2677 the REG is being modified.
32131a9c 2678
ff32812a
RS
2679 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2680 That's used when we eliminate in expressions stored in notes.
2681 This means, do not set ref_outside_mem even if the reference
2682 is outside of MEMs.
2683
32131a9c
RK
2684 If we see a modification to a register we know about, take the
2685 appropriate action (see case SET, below).
2686
 2687   REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2688 replacements done assuming all offsets are at their initial values. If
2689 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2690 encounter, return the actual location so that find_reloads will do
2691 the proper thing. */
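/* For example, with the frame pointer being eliminated in favor of the
   stack pointer at offset 16 (an illustrative value), a bare (reg fp) is
   returned as (plus (reg sp) (const_int 16)), and
   (plus (reg fp) (const_int 4)) becomes (plus (reg sp) (const_int 20));
   inside a MEM the sum may collapse to just (reg sp) when the constant
   cancels the offset.  */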
2692
2693rtx
1914f5da 2694eliminate_regs (x, mem_mode, insn)
32131a9c
RK
2695 rtx x;
2696 enum machine_mode mem_mode;
2697 rtx insn;
2698{
2699 enum rtx_code code = GET_CODE (x);
2700 struct elim_table *ep;
2701 int regno;
2702 rtx new;
2703 int i, j;
6f7d635c 2704 const char *fmt;
32131a9c
RK
2705 int copied = 0;
2706
d6633f01
NS
2707 if (! current_function_decl)
2708 return x;
9969bb2c 2709
32131a9c
RK
2710 switch (code)
2711 {
2712 case CONST_INT:
2713 case CONST_DOUBLE:
2714 case CONST:
2715 case SYMBOL_REF:
2716 case CODE_LABEL:
2717 case PC:
2718 case CC0:
2719 case ASM_INPUT:
2720 case ADDR_VEC:
2721 case ADDR_DIFF_VEC:
2722 case RETURN:
2723 return x;
2724
e9a25f70
JL
2725 case ADDRESSOF:
2726 /* This is only for the benefit of the debugging backends, which call
2727 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2728 removed after CSE. */
1914f5da 2729 new = eliminate_regs (XEXP (x, 0), 0, insn);
e9a25f70
JL
2730 if (GET_CODE (new) == MEM)
2731 return XEXP (new, 0);
2732 return x;
2733
32131a9c
RK
2734 case REG:
2735 regno = REGNO (x);
2736
2737 /* First handle the case where we encounter a bare register that
2738 is eliminable. Replace it with a PLUS. */
2739 if (regno < FIRST_PSEUDO_REGISTER)
2740 {
2741 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2742 ep++)
2743 if (ep->from_rtx == x && ep->can_eliminate)
2744 {
ff32812a
RS
2745 if (! mem_mode
2746 /* Refs inside notes don't count for this purpose. */
fe089a90 2747 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2748 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2749 ep->ref_outside_mem = 1;
2750 return plus_constant (ep->to_rtx, ep->previous_offset);
2751 }
2752
2753 }
2b49ee39
R
2754 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2755 && reg_equiv_constant[regno]
2756 && ! CONSTANT_P (reg_equiv_constant[regno]))
2757 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2758 mem_mode, insn);
32131a9c
RK
2759 return x;
2760
c5c76735
JL
2761 /* You might think handling MINUS in a manner similar to PLUS is a
2762 good idea. It is not. It has been tried multiple times and every
 2763     time the change has had to be reverted.
2764
2765 Other parts of reload know a PLUS is special (gen_reload for example)
 2766     and require special code to handle a reloaded PLUS operand.
2767
2768 Also consider backends where the flags register is clobbered by a
2769 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2770 lea instruction comes to mind). If we try to reload a MINUS, we
2771 may kill the flags register that was holding a useful value.
2772
2773 So, please before trying to handle MINUS, consider reload as a
2774 whole instead of this little section as well as the backend issues. */
32131a9c
RK
2775 case PLUS:
2776 /* If this is the sum of an eliminable register and a constant, rework
2777 the sum. */
2778 if (GET_CODE (XEXP (x, 0)) == REG
2779 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2780 && CONSTANT_P (XEXP (x, 1)))
2781 {
2782 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2783 ep++)
2784 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2785 {
e5687447
JW
2786 if (! mem_mode
2787 /* Refs inside notes don't count for this purpose. */
2788 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2789 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2790 ep->ref_outside_mem = 1;
2791
2792 /* The only time we want to replace a PLUS with a REG (this
2793 occurs when the constant operand of the PLUS is the negative
2794 of the offset) is when we are inside a MEM. We won't want
2795 to do so at other times because that would change the
2796 structure of the insn in a way that reload can't handle.
2797 We special-case the commonest situation in
2798 eliminate_regs_in_insn, so just replace a PLUS with a
2799 PLUS here, unless inside a MEM. */
a23b64d5 2800 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2801 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2802 return ep->to_rtx;
2803 else
38a448ca
RH
2804 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2805 plus_constant (XEXP (x, 1),
2806 ep->previous_offset));
32131a9c
RK
2807 }
2808
2809 /* If the register is not eliminable, we are done since the other
2810 operand is a constant. */
2811 return x;
2812 }
2813
2814 /* If this is part of an address, we want to bring any constant to the
2815 outermost PLUS. We will do this by doing register replacement in
2816 our operands and seeing if a constant shows up in one of them.
2817
2818 We assume here this is part of an address (or a "load address" insn)
2819 since an eliminable register is not likely to appear in any other
2820 context.
2821
2822 If we have (plus (eliminable) (reg)), we want to produce
930aeef3 2823	 (plus (plus (replacement) (reg)) (const)). If this was part of a
32131a9c
RK
2824 normal add insn, (plus (replacement) (reg)) will be pushed as a
2825 reload. This is the desired action. */
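      /* For example, eliminating fp in (plus (reg fp) (reg 66)) with offset
	 12 first yields new0 = (plus (reg sp) (const_int 12)) and
	 new1 = (reg 66); form_sum then moves the constant outward, giving
	 (plus (plus (reg sp) (reg 66)) (const_int 12)) as described above
	 (register numbers and offset are illustrative).  */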
2826
2827 {
1914f5da
RH
2828 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2829 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2830
2831 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2832 {
2833 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2834 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2835 we must replace the constant here since it may no longer
2836 be in the position of any operand. */
2837 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2838 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2839 && reg_renumber[REGNO (new1)] < 0
2840 && reg_equiv_constant != 0
2841 && reg_equiv_constant[REGNO (new1)] != 0)
2842 new1 = reg_equiv_constant[REGNO (new1)];
2843 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2844 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2845 && reg_renumber[REGNO (new0)] < 0
2846 && reg_equiv_constant[REGNO (new0)] != 0)
2847 new0 = reg_equiv_constant[REGNO (new0)];
2848
2849 new = form_sum (new0, new1);
2850
2851 /* As above, if we are not inside a MEM we do not want to
2852 turn a PLUS into something else. We might try to do so here
2853 for an addition of 0 if we aren't optimizing. */
2854 if (! mem_mode && GET_CODE (new) != PLUS)
38a448ca 2855 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
32131a9c
RK
2856 else
2857 return new;
2858 }
2859 }
2860 return x;
2861
981c7390 2862 case MULT:
05d10675 2863 /* If this is the product of an eliminable register and a
981c7390
RK
 2864	 constant, apply the distributive law and move the constant out
2865 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2866 to keep load-address insns valid. This case is pathological.
981c7390
RK
2867 We ignore the possibility of overflow here. */
2868 if (GET_CODE (XEXP (x, 0)) == REG
2869 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2870 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2871 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2872 ep++)
2873 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2874 {
2875 if (! mem_mode
2876 /* Refs inside notes don't count for this purpose. */
2877 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2878 || GET_CODE (insn) == INSN_LIST)))
2879 ep->ref_outside_mem = 1;
2880
2881 return
38a448ca 2882 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
981c7390
RK
2883 ep->previous_offset * INTVAL (XEXP (x, 1)));
2884 }
32131a9c 2885
0f41302f 2886 /* ... fall through ... */
32131a9c 2887
32131a9c
RK
2888 case CALL:
2889 case COMPARE:
c5c76735 2890 /* See comments before PLUS about handling MINUS. */
930aeef3 2891 case MINUS:
32131a9c
RK
2892 case DIV: case UDIV:
2893 case MOD: case UMOD:
2894 case AND: case IOR: case XOR:
45620ed4
RK
2895 case ROTATERT: case ROTATE:
2896 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2897 case NE: case EQ:
2898 case GE: case GT: case GEU: case GTU:
2899 case LE: case LT: case LEU: case LTU:
2900 {
1914f5da 2901 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2902 rtx new1
1914f5da 2903 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2904
2905 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
38a448ca 2906 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
32131a9c
RK
2907 }
2908 return x;
2909
981c7390
RK
2910 case EXPR_LIST:
2911 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2912 if (XEXP (x, 0))
2913 {
1914f5da 2914 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
981c7390 2915 if (new != XEXP (x, 0))
13bb79d4
R
2916 {
2917 /* If this is a REG_DEAD note, it is not valid anymore.
2918 Using the eliminated version could result in creating a
2919 REG_DEAD note for the stack or frame pointer. */
2920 if (GET_MODE (x) == REG_DEAD)
2921 return (XEXP (x, 1)
2922 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2923 : NULL_RTX);
2924
2925 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2926 }
981c7390
RK
2927 }
2928
0f41302f 2929 /* ... fall through ... */
981c7390
RK
2930
2931 case INSN_LIST:
2932 /* Now do eliminations in the rest of the chain. If this was
2933 an EXPR_LIST, this might result in allocating more memory than is
2934 strictly needed, but it simplifies the code. */
2935 if (XEXP (x, 1))
2936 {
1914f5da 2937 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
981c7390 2938 if (new != XEXP (x, 1))
38a448ca 2939 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
981c7390
RK
2940 }
2941 return x;
2942
32131a9c
RK
2943 case PRE_INC:
2944 case POST_INC:
2945 case PRE_DEC:
2946 case POST_DEC:
2947 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2948 if (ep->to_rtx == XEXP (x, 0))
2949 {
4c05b187
RK
2950 int size = GET_MODE_SIZE (mem_mode);
2951
2952 /* If more bytes than MEM_MODE are pushed, account for them. */
2953#ifdef PUSH_ROUNDING
2954 if (ep->to_rtx == stack_pointer_rtx)
2955 size = PUSH_ROUNDING (size);
2956#endif
32131a9c 2957 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2958 ep->offset += size;
32131a9c 2959 else
4c05b187 2960 ep->offset -= size;
32131a9c
RK
2961 }
2962
2963 /* Fall through to generic unary operation case. */
32131a9c
RK
2964 case STRICT_LOW_PART:
2965 case NEG: case NOT:
2966 case SIGN_EXTEND: case ZERO_EXTEND:
2967 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2968 case FLOAT: case FIX:
2969 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2970 case ABS:
2971 case SQRT:
2972 case FFS:
1914f5da 2973 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c 2974 if (new != XEXP (x, 0))
38a448ca 2975 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
2976 return x;
2977
2978 case SUBREG:
2979 /* Similar to above processing, but preserve SUBREG_WORD.
2980 Convert (subreg (mem)) to (mem) if not paradoxical.
2981 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2982 pseudo didn't get a hard reg, we must replace this with the
2983 eliminated version of the memory location because push_reloads
2984 may do the replacement in certain circumstances. */
2985 if (GET_CODE (SUBREG_REG (x)) == REG
2986 && (GET_MODE_SIZE (GET_MODE (x))
2987 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2988 && reg_equiv_memory_loc != 0
2989 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2990 {
cb2afeb3 2991#if 0
32131a9c 2992 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
1914f5da 2993 mem_mode, insn);
32131a9c
RK
2994
2995 /* If we didn't change anything, we must retain the pseudo. */
2996 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 2997 new = SUBREG_REG (x);
32131a9c 2998 else
59e2c378 2999 {
59e2c378
RK
3000 /* In this case, we must show that the pseudo is used in this
3001 insn so that delete_output_reload will do the right thing. */
3002 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
3003 && GET_CODE (insn) != INSN_LIST)
b60a8416
R
3004 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
3005 SUBREG_REG (x)),
05d10675 3006 insn))
b60a8416
R
3007 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
3008
3009 /* Ensure NEW isn't shared in case we have to reload it. */
3010 new = copy_rtx (new);
59e2c378 3011 }
cb2afeb3
R
3012#else
3013 new = SUBREG_REG (x);
3014#endif
32131a9c
RK
3015 }
3016 else
1914f5da 3017 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
3018
3019 if (new != XEXP (x, 0))
3020 {
29ae5012
RK
3021 int x_size = GET_MODE_SIZE (GET_MODE (x));
3022 int new_size = GET_MODE_SIZE (GET_MODE (new));
3023
1914f5da 3024 if (GET_CODE (new) == MEM
6d49a073 3025 && ((x_size < new_size
1914f5da 3026#ifdef WORD_REGISTER_OPERATIONS
6d49a073
JW
3027 /* On these machines, combine can create rtl of the form
3028 (set (subreg:m1 (reg:m2 R) 0) ...)
05d10675 3029 where m1 < m2, and expects something interesting to
6d49a073
JW
3030 happen to the entire word. Moreover, it will use the
3031 (reg:m2 R) later, expecting all bits to be preserved.
05d10675 3032 So if the number of words is the same, preserve the
6d49a073
JW
3033 subreg so that push_reloads can see it. */
3034 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
1914f5da 3035#endif
6d49a073
JW
3036 )
3037 || (x_size == new_size))
1914f5da 3038 )
32131a9c
RK
3039 {
3040 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
3041 enum machine_mode mode = GET_MODE (x);
3042
f76b9db2
ILT
3043 if (BYTES_BIG_ENDIAN)
3044 offset += (MIN (UNITS_PER_WORD,
3045 GET_MODE_SIZE (GET_MODE (new)))
3046 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
32131a9c
RK
3047
3048 PUT_MODE (new, mode);
3049 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
3050 return new;
3051 }
3052 else
38a448ca 3053 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
32131a9c
RK
3054 }
3055
3056 return x;
3057
94714ecc
RK
3058 case USE:
 3059 /* If using a register that is the source of an elimination we still
3060 think can be performed, note it cannot be performed since we don't
3061 know how this register is used. */
3062 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3063 if (ep->from_rtx == XEXP (x, 0))
3064 ep->can_eliminate = 0;
3065
1914f5da 3066 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
94714ecc 3067 if (new != XEXP (x, 0))
38a448ca 3068 return gen_rtx_fmt_e (code, GET_MODE (x), new);
94714ecc
RK
3069 return x;
3070
32131a9c
RK
3071 case CLOBBER:
3072 /* If clobbering a register that is the replacement register for an
d45cf215 3073 elimination we still think can be performed, note that it cannot
32131a9c
RK
3074 be performed. Otherwise, we need not be concerned about it. */
3075 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3076 if (ep->to_rtx == XEXP (x, 0))
3077 ep->can_eliminate = 0;
3078
1914f5da 3079 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c 3080 if (new != XEXP (x, 0))
38a448ca 3081 return gen_rtx_fmt_e (code, GET_MODE (x), new);
32131a9c
RK
3082 return x;
3083
3084 case ASM_OPERANDS:
3085 {
3086 rtx *temp_vec;
3087 /* Properly handle sharing input and constraint vectors. */
3088 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3089 {
3090 /* When we come to a new vector not seen before,
3091 scan all its elements; keep the old vector if none
3092 of them changes; otherwise, make a copy. */
3093 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3094 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3095 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3096 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
1914f5da 3097 mem_mode, insn);
32131a9c
RK
3098
3099 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3100 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3101 break;
3102
3103 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3104 new_asm_operands_vec = old_asm_operands_vec;
3105 else
3106 new_asm_operands_vec
3107 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3108 }
3109
3110 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3111 if (new_asm_operands_vec == old_asm_operands_vec)
3112 return x;
3113
38a448ca
RH
3114 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3115 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3116 ASM_OPERANDS_OUTPUT_IDX (x),
3117 new_asm_operands_vec,
3118 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3119 ASM_OPERANDS_SOURCE_FILE (x),
3120 ASM_OPERANDS_SOURCE_LINE (x));
32131a9c
RK
3121 new->volatil = x->volatil;
3122 return new;
3123 }
3124
3125 case SET:
3126 /* Check for setting a register that we know about. */
3127 if (GET_CODE (SET_DEST (x)) == REG)
3128 {
3129 /* See if this is setting the replacement register for an
a8fdc208 3130 elimination.
32131a9c 3131
3ec2ea3e
DE
3132 If DEST is the hard frame pointer, we do nothing because we
3133 assume that all assignments to the frame pointer are for
3134 non-local gotos and are being done at a time when they are valid
3135 and do not disturb anything else. Some machines want to
3136 eliminate a fake argument pointer (or even a fake frame pointer)
3137 with either the real frame or the stack pointer. Assignments to
3138 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3139
3140 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3141 ep++)
3142 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3143 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3144 {
6dc42e49 3145 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3146 this elimination can't be done. */
3147 rtx src = SET_SRC (x);
3148
3149 if (GET_CODE (src) == PLUS
3150 && XEXP (src, 0) == SET_DEST (x)
3151 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3152 ep->offset -= INTVAL (XEXP (src, 1));
3153 else
3154 ep->can_eliminate = 0;
3155 }
3156
 3157 /* Now check to see whether we are assigning to a register that can be
3158 eliminated. If so, it must be as part of a PARALLEL, since we
3159 will not have been called if this is a single SET. So indicate
3160 that we can no longer eliminate this reg. */
3161 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3162 ep++)
3163 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3164 ep->can_eliminate = 0;
3165 }
3166
3167 /* Now avoid the loop below in this common case. */
3168 {
1914f5da
RH
3169 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3170 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3171
ff32812a 3172 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3173 write a CLOBBER insn. */
3174 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3175 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3176 && GET_CODE (insn) != INSN_LIST)
38a448ca 3177 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
32131a9c
RK
3178
3179 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
38a448ca 3180 return gen_rtx_SET (VOIDmode, new0, new1);
32131a9c
RK
3181 }
3182
3183 return x;
3184
3185 case MEM:
e9a25f70
JL
3186 /* This is only for the benefit of the debugging backends, which call
3187 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3188 removed after CSE. */
3189 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
1914f5da 3190 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
e9a25f70 3191
32131a9c
RK
3192 /* Our only special processing is to pass the mode of the MEM to our
3193 recursive call and copy the flags. While we are here, handle this
3194 case more efficiently. */
1914f5da 3195 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3196 if (new != XEXP (x, 0))
3197 {
38a448ca 3198 new = gen_rtx_MEM (GET_MODE (x), new);
32131a9c
RK
3199 new->volatil = x->volatil;
3200 new->unchanging = x->unchanging;
3201 new->in_struct = x->in_struct;
3202 return new;
3203 }
3204 else
3205 return x;
05d10675 3206
e9a25f70
JL
3207 default:
3208 break;
32131a9c
RK
3209 }
3210
3211 /* Process each of our operands recursively. If any have changed, make a
3212 copy of the rtx. */
3213 fmt = GET_RTX_FORMAT (code);
3214 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3215 {
3216 if (*fmt == 'e')
3217 {
1914f5da 3218 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3219 if (new != XEXP (x, i) && ! copied)
3220 {
3221 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3222 bcopy ((char *) x, (char *) new_x,
3223 (sizeof (*new_x) - sizeof (new_x->fld)
3224 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3225 x = new_x;
3226 copied = 1;
3227 }
3228 XEXP (x, i) = new;
3229 }
3230 else if (*fmt == 'E')
3231 {
3232 int copied_vec = 0;
3233 for (j = 0; j < XVECLEN (x, i); j++)
3234 {
1914f5da 3235 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
32131a9c
RK
3236 if (new != XVECEXP (x, i, j) && ! copied_vec)
3237 {
8f985ec4
ZW
3238 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3239 XVEC (x, i)->elem);
32131a9c
RK
3240 if (! copied)
3241 {
3242 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3243 bcopy ((char *) x, (char *) new_x,
3244 (sizeof (*new_x) - sizeof (new_x->fld)
3245 + (sizeof (new_x->fld[0])
3246 * GET_RTX_LENGTH (code))));
32131a9c
RK
3247 x = new_x;
3248 copied = 1;
3249 }
3250 XVEC (x, i) = new_v;
3251 copied_vec = 1;
3252 }
3253 XVECEXP (x, i, j) = new;
3254 }
3255 }
3256 }
3257
3258 return x;
3259}
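
/* Illustrative usage sketch only, not part of the pass.  It assumes a
   frame-pointer-to-stack-pointer elimination whose current offset is 16;
   the function name is made up for the example.  */
#if 0
static rtx
example_eliminate_address (insn)
     rtx insn;
{
  /* ADDR is (plus (reg fp) (const_int 8)).  */
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (8));

  /* The result is rtl equivalent to (plus (reg sp) (const_int 24));
     ADDR itself is not modified, a fresh rtx is built instead.  */
  return eliminate_regs (addr, 0, insn);
}
#endif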
3260\f
3261/* Scan INSN and eliminate all eliminable registers in it.
3262
3263 If REPLACE is nonzero, do the replacement destructively. Also
 3264 delete the insn as dead if it is setting an eliminable register.
3265
3266 If REPLACE is zero, do all our allocations in reload_obstack.
3267
3268 If no eliminations were done and this insn doesn't require any elimination
3269 processing (these are not identical conditions: it might be updating sp,
3270 but not referencing fp; this needs to be seen during reload_as_needed so
3271 that the offset between fp and sp can be taken into consideration), zero
3272 is returned. Otherwise, 1 is returned. */
3273
3274static int
3275eliminate_regs_in_insn (insn, replace)
3276 rtx insn;
3277 int replace;
3278{
3279 rtx old_body = PATTERN (insn);
774672d2 3280 rtx old_set = single_set (insn);
32131a9c
RK
3281 rtx new_body;
3282 int val = 0;
3283 struct elim_table *ep;
3284
3285 if (! replace)
3286 push_obstacks (&reload_obstack, &reload_obstack);
3287
774672d2
RK
3288 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3289 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3290 {
3291 /* Check for setting an eliminable register. */
3292 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3293 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3294 {
dd1eab0a
RK
3295#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3296 /* If this is setting the frame pointer register to the
3297 hardware frame pointer register and this is an elimination
3298 that will be done (tested above), this insn is really
3299 adjusting the frame pointer downward to compensate for
3300 the adjustment done before a nonlocal goto. */
3301 if (ep->from == FRAME_POINTER_REGNUM
3302 && ep->to == HARD_FRAME_POINTER_REGNUM)
3303 {
3304 rtx src = SET_SRC (old_set);
973838fd 3305 int offset = 0, ok = 0;
8026ebba 3306 rtx prev_insn, prev_set;
dd1eab0a
RK
3307
3308 if (src == ep->to_rtx)
3309 offset = 0, ok = 1;
3310 else if (GET_CODE (src) == PLUS
bb22893c
JW
3311 && GET_CODE (XEXP (src, 0)) == CONST_INT
3312 && XEXP (src, 1) == ep->to_rtx)
dd1eab0a 3313 offset = INTVAL (XEXP (src, 0)), ok = 1;
bb22893c
JW
3314 else if (GET_CODE (src) == PLUS
3315 && GET_CODE (XEXP (src, 1)) == CONST_INT
3316 && XEXP (src, 0) == ep->to_rtx)
3317 offset = INTVAL (XEXP (src, 1)), ok = 1;
8026ebba
ILT
3318 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3319 && (prev_set = single_set (prev_insn)) != 0
3320 && rtx_equal_p (SET_DEST (prev_set), src))
3321 {
3322 src = SET_SRC (prev_set);
3323 if (src == ep->to_rtx)
3324 offset = 0, ok = 1;
3325 else if (GET_CODE (src) == PLUS
3326 && GET_CODE (XEXP (src, 0)) == CONST_INT
3327 && XEXP (src, 1) == ep->to_rtx)
3328 offset = INTVAL (XEXP (src, 0)), ok = 1;
3329 else if (GET_CODE (src) == PLUS
3330 && GET_CODE (XEXP (src, 1)) == CONST_INT
3331 && XEXP (src, 0) == ep->to_rtx)
3332 offset = INTVAL (XEXP (src, 1)), ok = 1;
3333 }
dd1eab0a
RK
3334
3335 if (ok)
3336 {
3337 if (replace)
3338 {
3339 rtx src
3340 = plus_constant (ep->to_rtx, offset - ep->offset);
3341
3342 /* First see if this insn remains valid when we
3343 make the change. If not, keep the INSN_CODE
 3344 the same and let reload fix it up. */
3345 validate_change (insn, &SET_SRC (old_set), src, 1);
3346 validate_change (insn, &SET_DEST (old_set),
3347 ep->to_rtx, 1);
3348 if (! apply_change_group ())
3349 {
3350 SET_SRC (old_set) = src;
3351 SET_DEST (old_set) = ep->to_rtx;
3352 }
3353 }
3354
3355 val = 1;
3356 goto done;
3357 }
3358 }
3359#endif
3360
32131a9c
RK
3361 /* In this case this insn isn't serving a useful purpose. We
3362 will delete it in reload_as_needed once we know that this
3363 elimination is, in fact, being done.
3364
abc95ed3 3365 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
3366 process it since it won't be used unless something changes. */
3367 if (replace)
8a34409d
RH
3368 {
3369 delete_dead_insn (insn);
3370 return 1;
3371 }
32131a9c
RK
3372 val = 1;
3373 goto done;
3374 }
3375
3376 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3377 in the insn is the negative of the offset in FROM. Substitute
3378 (set (reg) (reg to)) for the insn and change its code.
3379
cb2afeb3 3380 We have to do this here, rather than in eliminate_regs, so that we can
32131a9c
RK
3381 change the insn code. */
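	  /* For instance (illustrative numbers): with an fp-to-sp
	     elimination in effect at a current offset of 32, the insn
		 (set (reg r) (plus (reg fp) (const_int -32)))
	     is rewritten in place as (set (reg r) (reg sp)) and its
	     INSN_CODE is cleared so it is re-recognized, normally as a
	     plain register move.  */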
3382
774672d2
RK
3383 if (GET_CODE (SET_SRC (old_set)) == PLUS
3384 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3385 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3386 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3387 ep++)
774672d2 3388 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3389 && ep->can_eliminate)
32131a9c 3390 {
922d9d40
RK
3391 /* We must stop at the first elimination that will be used.
3392 If this one would replace the PLUS with a REG, do it
3393 now. Otherwise, quit the loop and let eliminate_regs
3394 do its normal replacement. */
774672d2 3395 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3396 {
774672d2
RK
3397 /* We assume here that we don't need a PARALLEL of
3398 any CLOBBERs for this assignment. There's not
3399 much we can do if we do need it. */
38a448ca
RH
3400 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3401 SET_DEST (old_set),
3402 ep->to_rtx);
922d9d40
RK
3403 INSN_CODE (insn) = -1;
3404 val = 1;
3405 goto done;
3406 }
3407
3408 break;
32131a9c
RK
3409 }
3410 }
3411
3412 old_asm_operands_vec = 0;
3413
3414 /* Replace the body of this insn with a substituted form. If we changed
05d10675 3415 something, return non-zero.
32131a9c
RK
3416
3417 If we are replacing a body that was a (set X (plus Y Z)), try to
3418 re-recognize the insn. We do this in case we had a simple addition
3419 but now can do this as a load-address. This saves an insn in this
0f41302f 3420 common case. */
32131a9c 3421
1914f5da 3422 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3423 if (new_body != old_body)
3424 {
7c791b13
RK
3425 /* If we aren't replacing things permanently and we changed something,
3426 make another copy to ensure that all the RTL is new. Otherwise
 3427 things can go wrong if find_reloads swaps commutative operands
0f41302f 3428 and one is inside RTL that has been copied while the other is not. */
7c791b13 3429
4d411872
RS
3430 /* Don't copy an asm_operands because (1) there's no need and (2)
3431 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3432 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3433 new_body = copy_rtx (new_body);
3434
774672d2
RK
3435 /* If we had a move insn but now we don't, rerecognize it. This will
3436 cause spurious re-recognition if the old move had a PARALLEL since
3437 the new one still will, but we can't call single_set without
3438 having put NEW_BODY into the insn and the re-recognition won't
3439 hurt in this rare case. */
3440 if (old_set != 0
3441 && ((GET_CODE (SET_SRC (old_set)) == REG
3442 && (GET_CODE (new_body) != SET
3443 || GET_CODE (SET_SRC (new_body)) != REG))
3444 /* If this was a load from or store to memory, compare
3445 the MEM in recog_operand to the one in the insn. If they
3446 are not equal, then rerecognize the insn. */
3447 || (old_set != 0
3448 && ((GET_CODE (SET_SRC (old_set)) == MEM
3449 && SET_SRC (old_set) != recog_operand[1])
3450 || (GET_CODE (SET_DEST (old_set)) == MEM
3451 && SET_DEST (old_set) != recog_operand[0])))
3452 /* If this was an add insn before, rerecognize. */
3453 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3454 {
3455 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3456 /* If recognition fails, store the new body anyway.
3457 It's normal to have recognition failures here
3458 due to bizarre memory addresses; reloading will fix them. */
3459 PATTERN (insn) = new_body;
4a5d0fb5 3460 }
0ba846c7 3461 else
32131a9c
RK
3462 PATTERN (insn) = new_body;
3463
32131a9c
RK
3464 val = 1;
3465 }
a8fdc208 3466
cb2afeb3 3467 /* Loop through all elimination pairs. See if any have changed.
a8efe40d 3468
32131a9c
RK
 3469 We also detect cases where register elimination cannot be done,
3470 namely, if a register would be both changed and referenced outside a MEM
3471 in the resulting insn since such an insn is often undefined and, even if
3472 not, we cannot know what meaning will be given to it. Note that it is
3473 valid to have a register used in an address in an insn that changes it
3474 (presumably with a pre- or post-increment or decrement).
3475
3476 If anything changes, return nonzero. */
3477
32131a9c
RK
3478 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3479 {
3480 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3481 ep->can_eliminate = 0;
3482
3483 ep->ref_outside_mem = 0;
3484
3485 if (ep->previous_offset != ep->offset)
3486 val = 1;
32131a9c
RK
3487 }
3488
3489 done:
9faa82d8 3490 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3491 needed even when REPLACE is zero because a REG_DEAD note might refer
3492 to a register that we eliminate and could cause a different number
3493 of spill registers to be needed in the final reload pass than in
3494 the pre-passes. */
20748cab 3495 if (val && REG_NOTES (insn) != 0)
1914f5da 3496 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3497
32131a9c
RK
3498 if (! replace)
3499 pop_obstacks ();
3500
3501 return val;
3502}
3503
cb2afeb3
R
 3504/* Loop through all elimination pairs.
 3505 Record the current offset of each pair as its previous offset.
 3506
 3507 Also recalculate the number of elimination pairs that are no longer
 3508 at their initial offset (num_not_at_initial_offset). */
3509
3510static void
3511update_eliminable_offsets ()
3512{
3513 struct elim_table *ep;
3514
3515 num_not_at_initial_offset = 0;
3516 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3517 {
3518 ep->previous_offset = ep->offset;
3519 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3520 num_not_at_initial_offset++;
cb2afeb3
R
3521 }
3522}
3523
32131a9c
RK
3524/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3525 replacement we currently believe is valid, mark it as not eliminable if X
3526 modifies DEST in any way other than by adding a constant integer to it.
3527
3528 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3529 all assignments to the hard frame pointer are nonlocal gotos and are being
3530 done at a time when they are valid and do not disturb anything else.
32131a9c 3531 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3532 frame or stack pointer. Assignments to the hard frame pointer must not
3533 prevent this elimination.
32131a9c
RK
3534
3535 Called via note_stores from reload before starting its passes to scan
3536 the insns of the function. */
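
/* For example (illustrative): a stack adjustment such as
       (set (reg sp) (plus (reg sp) (const_int -64)))
   leaves eliminations into sp intact, whereas an arbitrary assignment
   such as (set (reg sp) (reg r1)) disables every elimination whose
   replacement (TO) register is sp.  */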
3537
3538static void
3539mark_not_eliminable (dest, x)
3540 rtx dest;
3541 rtx x;
3542{
e51712db 3543 register unsigned int i;
32131a9c
RK
3544
3545 /* A SUBREG of a hard register here is just changing its mode. We should
3546 not see a SUBREG of an eliminable hard register, but check just in
3547 case. */
3548 if (GET_CODE (dest) == SUBREG)
3549 dest = SUBREG_REG (dest);
3550
3ec2ea3e 3551 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3552 return;
3553
3554 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3555 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3556 && (GET_CODE (x) != SET
3557 || GET_CODE (SET_SRC (x)) != PLUS
3558 || XEXP (SET_SRC (x), 0) != dest
3559 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3560 {
3561 reg_eliminate[i].can_eliminate_previous
3562 = reg_eliminate[i].can_eliminate = 0;
3563 num_eliminable--;
3564 }
3565}
09dd1133 3566
c47f5ea5
BS
3567/* Verify that the initial elimination offsets did not change since the
3568 last call to set_initial_elim_offsets. This is used to catch cases
3569 where something illegal happened during reload_as_needed that could
3570 cause incorrect code to be generated if we did not check for it. */
3571static void
3572verify_initial_elim_offsets ()
3573{
3574 int t;
3575
3576#ifdef ELIMINABLE_REGS
3577 struct elim_table *ep;
3578
3579 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3580 {
3581 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3582 if (t != ep->initial_offset)
3583 abort ();
3584 }
3585#else
3586 INITIAL_FRAME_POINTER_OFFSET (t);
3587 if (t != reg_eliminate[0].initial_offset)
3588 abort ();
05d10675 3589#endif
c47f5ea5
BS
3590}
3591
09dd1133
BS
3592/* Reset all offsets on eliminable registers to their initial values. */
3593static void
3594set_initial_elim_offsets ()
3595{
1f3b1e1a 3596 struct elim_table *ep = reg_eliminate;
09dd1133
BS
3597
3598#ifdef ELIMINABLE_REGS
1f3b1e1a 3599 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
09dd1133
BS
3600 {
3601 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
1f3b1e1a 3602 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3603 }
3604#else
1f3b1e1a
JL
3605 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3606 ep->previous_offset = ep->offset = ep->initial_offset;
09dd1133
BS
3607#endif
3608
3609 num_not_at_initial_offset = 0;
1f3b1e1a 3610}
09dd1133 3611
1f3b1e1a
JL
3612/* Initialize the known label offsets.
3613 Set a known offset for each forced label to be at the initial offset
3614 of each elimination. We do this because we assume that all
3615 computed jumps occur from a location where each elimination is
3616 at its initial offset.
3617 For all other labels, show that we don't know the offsets. */
09dd1133 3618
1f3b1e1a
JL
3619static void
3620set_initial_label_offsets ()
3621{
3622 rtx x;
3623 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
09dd1133
BS
3624
3625 for (x = forced_labels; x; x = XEXP (x, 1))
3626 if (XEXP (x, 0))
3627 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3628}
3629
1f3b1e1a
JL
3630/* Set all elimination offsets to the known values for the code label given
3631 by INSN. */
3632static void
3633set_offsets_for_label (insn)
3634 rtx insn;
3635{
973838fd 3636 unsigned int i;
1f3b1e1a
JL
3637 int label_nr = CODE_LABEL_NUMBER (insn);
3638 struct elim_table *ep;
3639
3640 num_not_at_initial_offset = 0;
3641 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3642 {
3643 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3644 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3645 num_not_at_initial_offset++;
3646 }
3647}
3648
09dd1133
BS
3649/* See if anything that happened changes which eliminations are valid.
3650 For example, on the Sparc, whether or not the frame pointer can
3651 be eliminated can depend on what registers have been used. We need
3652 not check some conditions again (such as flag_omit_frame_pointer)
3653 since they can't have changed. */
3654
3655static void
3656update_eliminables (pset)
3657 HARD_REG_SET *pset;
3658{
3659#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3660 int previous_frame_pointer_needed = frame_pointer_needed;
3661#endif
3662 struct elim_table *ep;
3663
3664 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3665 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3666#ifdef ELIMINABLE_REGS
3667 || ! CAN_ELIMINATE (ep->from, ep->to)
3668#endif
3669 )
3670 ep->can_eliminate = 0;
3671
3672 /* Look for the case where we have discovered that we can't replace
3673 register A with register B and that means that we will now be
3674 trying to replace register A with register C. This means we can
3675 no longer replace register C with register B and we need to disable
3676 such an elimination, if it exists. This occurs often with A == ap,
3677 B == sp, and C == fp. */
3678
3679 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3680 {
3681 struct elim_table *op;
3682 register int new_to = -1;
3683
3684 if (! ep->can_eliminate && ep->can_eliminate_previous)
3685 {
3686 /* Find the current elimination for ep->from, if there is a
3687 new one. */
3688 for (op = reg_eliminate;
3689 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3690 if (op->from == ep->from && op->can_eliminate)
3691 {
3692 new_to = op->to;
3693 break;
3694 }
3695
3696 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3697 disable it. */
3698 for (op = reg_eliminate;
3699 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3700 if (op->from == new_to && op->to == ep->to)
3701 op->can_eliminate = 0;
3702 }
3703 }
3704
3705 /* See if any registers that we thought we could eliminate the previous
3706 time are no longer eliminable. If so, something has changed and we
3707 must spill the register. Also, recompute the number of eliminable
3708 registers and see if the frame pointer is needed; it is if there is
3709 no elimination of the frame pointer that we can perform. */
3710
3711 frame_pointer_needed = 1;
3712 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3713 {
3714 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3715 && ep->to != HARD_FRAME_POINTER_REGNUM)
3716 frame_pointer_needed = 0;
3717
3718 if (! ep->can_eliminate && ep->can_eliminate_previous)
3719 {
3720 ep->can_eliminate_previous = 0;
3721 SET_HARD_REG_BIT (*pset, ep->from);
3722 num_eliminable--;
3723 }
3724 }
3725
3726#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3727 /* If we didn't need a frame pointer last time, but we do now, spill
3728 the hard frame pointer. */
3729 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3730 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3731#endif
3732}
3733
3734/* Initialize the table of registers to eliminate. */
3735static void
3736init_elim_table ()
3737{
3738 struct elim_table *ep;
590cf94d
KG
3739#ifdef ELIMINABLE_REGS
3740 struct elim_table_1 *ep1;
3741#endif
09dd1133 3742
590cf94d 3743 if (!reg_eliminate)
ad85216e
KG
3744 reg_eliminate = (struct elim_table *)
3745 xcalloc(sizeof(struct elim_table), NUM_ELIMINABLE_REGS);
05d10675 3746
09dd1133
BS
3747 /* Does this function require a frame pointer? */
3748
3749 frame_pointer_needed = (! flag_omit_frame_pointer
3750#ifdef EXIT_IGNORE_STACK
3751 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3752 and restore sp for alloca. So we can't eliminate
3753 the frame pointer in that case. At some point,
3754 we should improve this by emitting the
3755 sp-adjusting insns for this case. */
3756 || (current_function_calls_alloca
3757 && EXIT_IGNORE_STACK)
3758#endif
3759 || FRAME_POINTER_REQUIRED);
3760
3761 num_eliminable = 0;
3762
3763#ifdef ELIMINABLE_REGS
590cf94d
KG
3764 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3765 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
09dd1133 3766 {
590cf94d
KG
3767 ep->from = ep1->from;
3768 ep->to = ep1->to;
09dd1133
BS
3769 ep->can_eliminate = ep->can_eliminate_previous
3770 = (CAN_ELIMINATE (ep->from, ep->to)
3771 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3772 }
3773#else
590cf94d
KG
3774 reg_eliminate[0].from = reg_eliminate_1[0].from;
3775 reg_eliminate[0].to = reg_eliminate_1[0].to;
09dd1133
BS
3776 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3777 = ! frame_pointer_needed;
3778#endif
3779
3780 /* Count the number of eliminable registers and build the FROM and TO
3781 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3782 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3783 We depend on this. */
3784 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3785 {
3786 num_eliminable += ep->can_eliminate;
3787 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3788 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3789 }
3790}
32131a9c
RK
3791\f
3792/* Kick all pseudos out of hard register REGNO.
32131a9c
RK
3793 If DUMPFILE is nonzero, log actions taken on that file.
3794
3795 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3796 because we found we can't eliminate some register. In that case, no pseudos
3797 are allowed to be in the register, even if they are only in a block that
3798 doesn't require spill registers, unlike the case when we are spilling this
3799 hard reg to produce another spill register.
3800
 3801 Any pseudos kicked out are recorded in the spilled_pseudos register set. */
3802
03acd8f8
BS
3803static void
3804spill_hard_reg (regno, dumpfile, cant_eliminate)
32131a9c 3805 register int regno;
6a651371 3806 FILE *dumpfile ATTRIBUTE_UNUSED;
32131a9c
RK
3807 int cant_eliminate;
3808{
32131a9c
RK
3809 register int i;
3810
9ff3516a 3811 if (cant_eliminate)
03acd8f8
BS
3812 {
3813 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3814 regs_ever_live[regno] = 1;
3815 }
9ff3516a 3816
32131a9c
RK
3817 /* Spill every pseudo reg that was allocated to this reg
3818 or to something that overlaps this reg. */
3819
3820 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3821 if (reg_renumber[i] >= 0
3822 && reg_renumber[i] <= regno
a8fdc208 3823 && (reg_renumber[i]
32131a9c
RK
3824 + HARD_REGNO_NREGS (reg_renumber[i],
3825 PSEUDO_REGNO_MODE (i))
3826 > regno))
03acd8f8
BS
3827 SET_REGNO_REG_SET (spilled_pseudos, i);
3828}
32131a9c 3829
03acd8f8
BS
3830/* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3831 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3832static void
3833ior_hard_reg_set (set1, set2)
3834 HARD_REG_SET *set1, *set2;
3835{
3836 IOR_HARD_REG_SET (*set1, *set2);
3837}
05d10675 3838
03acd8f8
BS
 3839/* After find_reload_regs has been run for all insns that need reloads,
3840 and/or spill_hard_regs was called, this function is used to actually
3841 spill pseudo registers and try to reallocate them. It also sets up the
3842 spill_regs array for use by choose_reload_regs. */
a8fdc208 3843
03acd8f8
BS
3844static int
3845finish_spills (global, dumpfile)
3846 int global;
3847 FILE *dumpfile;
3848{
3849 struct insn_chain *chain;
3850 int something_changed = 0;
3851 int i;
3852
3853 /* Build the spill_regs array for the function. */
3854 /* If there are some registers still to eliminate and one of the spill regs
3855 wasn't ever used before, additional stack space may have to be
3856 allocated to store this register. Thus, we may have changed the offset
3857 between the stack and frame pointers, so mark that something has changed.
32131a9c 3858
03acd8f8
BS
 3859 One might think that we need only set SOMETHING_CHANGED if this is a call-used
3860 register. However, the set of registers that must be saved by the
3861 prologue is not identical to the call-used set. For example, the
3862 register used by the call insn for the return PC is a call-used register,
3863 but must be saved by the prologue. */
3864
3865 n_spills = 0;
3866 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3867 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3868 {
3869 spill_reg_order[i] = n_spills;
3870 spill_regs[n_spills++] = i;
3871 if (num_eliminable && ! regs_ever_live[i])
3872 something_changed = 1;
3873 regs_ever_live[i] = 1;
3874 }
3875 else
3876 spill_reg_order[i] = -1;
3877
3878 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3879 if (REGNO_REG_SET_P (spilled_pseudos, i))
3880 {
3881 /* Record the current hard register the pseudo is allocated to in
3882 pseudo_previous_regs so we avoid reallocating it to the same
3883 hard reg in a later pass. */
3884 if (reg_renumber[i] < 0)
3885 abort ();
3886 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
32131a9c
RK
3887 /* Mark it as no longer having a hard register home. */
3888 reg_renumber[i] = -1;
3889 /* We will need to scan everything again. */
3890 something_changed = 1;
03acd8f8 3891 }
7609e720 3892
03acd8f8
BS
3893 /* Retry global register allocation if possible. */
3894 if (global)
3895 {
3896 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
3897 /* For every insn that needs reloads, set the registers used as spill
3898 regs in pseudo_forbidden_regs for every pseudo live across the
3899 insn. */
3900 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3901 {
3902 EXECUTE_IF_SET_IN_REG_SET
3903 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
3904 {
3905 ior_hard_reg_set (pseudo_forbidden_regs + i,
3906 &chain->used_spill_regs);
3907 });
3908 EXECUTE_IF_SET_IN_REG_SET
3909 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
3910 {
3911 ior_hard_reg_set (pseudo_forbidden_regs + i,
3912 &chain->used_spill_regs);
3913 });
3914 }
7609e720 3915
03acd8f8
BS
3916 /* Retry allocating the spilled pseudos. For each reg, merge the
3917 various reg sets that indicate which hard regs can't be used,
3918 and call retry_global_alloc.
05d10675 3919 We change spill_pseudos here to only contain pseudos that did not
03acd8f8
BS
3920 get a new hard register. */
3921 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3922 if (reg_old_renumber[i] != reg_renumber[i])
32131a9c 3923 {
03acd8f8
BS
3924 HARD_REG_SET forbidden;
3925 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3926 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3927 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3928 retry_global_alloc (i, forbidden);
3929 if (reg_renumber[i] >= 0)
3930 CLEAR_REGNO_REG_SET (spilled_pseudos, i);
32131a9c 3931 }
03acd8f8 3932 }
7609e720 3933
03acd8f8
BS
3934 /* Fix up the register information in the insn chain.
3935 This involves deleting those of the spilled pseudos which did not get
3936 a new hard register home from the live_{before,after} sets. */
7609e720
BS
3937 for (chain = reload_insn_chain; chain; chain = chain->next)
3938 {
03acd8f8
BS
3939 HARD_REG_SET used_by_pseudos;
3940 HARD_REG_SET used_by_pseudos2;
3941
7609e720
BS
3942 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
3943 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
03acd8f8
BS
3944
3945 /* Mark any unallocated hard regs as available for spills. That
3946 makes inheritance work somewhat better. */
3947 if (chain->need_reload)
3948 {
3949 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
3950 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
3951 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3952
3953 /* Save the old value for the sanity test below. */
3954 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3955
3956 compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
3957 compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
3958 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3959 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3960
3961 /* Make sure we only enlarge the set. */
3962 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3963 abort ();
3964 ok:;
3965 }
7609e720 3966 }
03acd8f8
BS
3967
3968 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3969 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3970 {
3971 int regno = reg_renumber[i];
3972 if (reg_old_renumber[i] == regno)
3973 continue;
05d10675 3974
03acd8f8
BS
3975 alter_reg (i, reg_old_renumber[i]);
3976 reg_old_renumber[i] = regno;
3977 if (dumpfile)
3978 {
3979 if (regno == -1)
3980 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3981 else
3982 fprintf (dumpfile, " Register %d now in %d.\n\n",
3983 i, reg_renumber[i]);
3984 }
3985 }
3986
3987 return something_changed;
7609e720 3988}
32131a9c 3989\f
05d10675 3990/* Find all paradoxical subregs within X and update reg_max_ref_width.
56f58d3a
RK
3991 Also mark any hard registers used to store user variables as
3992 forbidden from being used for spill registers. */
32131a9c
RK
3993
3994static void
3995scan_paradoxical_subregs (x)
3996 register rtx x;
3997{
3998 register int i;
6f7d635c 3999 register const char *fmt;
32131a9c
RK
4000 register enum rtx_code code = GET_CODE (x);
4001
4002 switch (code)
4003 {
56f58d3a 4004 case REG:
03acd8f8 4005#if 0
e9a25f70 4006 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
f95182a4 4007 && REG_USERVAR_P (x))
03acd8f8
BS
4008 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
4009#endif
56f58d3a
RK
4010 return;
4011
32131a9c
RK
4012 case CONST_INT:
4013 case CONST:
4014 case SYMBOL_REF:
4015 case LABEL_REF:
4016 case CONST_DOUBLE:
4017 case CC0:
4018 case PC:
32131a9c
RK
4019 case USE:
4020 case CLOBBER:
4021 return;
4022
4023 case SUBREG:
4024 if (GET_CODE (SUBREG_REG (x)) == REG
4025 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4026 reg_max_ref_width[REGNO (SUBREG_REG (x))]
4027 = GET_MODE_SIZE (GET_MODE (x));
4028 return;
05d10675 4029
e9a25f70
JL
4030 default:
4031 break;
32131a9c
RK
4032 }
4033
4034 fmt = GET_RTX_FORMAT (code);
4035 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4036 {
4037 if (fmt[i] == 'e')
4038 scan_paradoxical_subregs (XEXP (x, i));
4039 else if (fmt[i] == 'E')
4040 {
4041 register int j;
4042 for (j = XVECLEN (x, i) - 1; j >=0; j--)
4043 scan_paradoxical_subregs (XVECEXP (x, i, j));
4044 }
4045 }
4046}
4047\f
32131a9c 4048static int
788a0818 4049hard_reg_use_compare (p1p, p2p)
03acd8f8
BS
4050 const GENERIC_PTR p1p;
4051 const GENERIC_PTR p2p;
05d10675 4052{
03acd8f8
BS
4053 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p;
4054 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p;
4055 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
4056 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
4057 if (bad1 && bad2)
4058 return p1->regno - p2->regno;
4059 if (bad1)
4060 return 1;
4061 if (bad2)
4062 return -1;
4063 if (p1->uses > p2->uses)
4064 return 1;
4065 if (p1->uses < p2->uses)
4066 return -1;
32131a9c
RK
4067 /* If regs are equally good, sort by regno,
4068 so that the results of qsort leave nothing to chance. */
4069 return p1->regno - p2->regno;
4070}
4071
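/* Example ordering (illustrative): with this comparison, qsort places
   registers excluded by bad_spill_regs last; among the rest, a register
   whose pseudos account for 3 references sorts ahead of one accounting
   for 7, and ties are broken by register number so the result is
   deterministic.  */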
03acd8f8
BS
4072/* Used for communication between order_regs_for_reload and count_pseudo.
4073 Used to avoid counting one pseudo twice. */
4074static regset pseudos_counted;
4075
4076/* Update the costs in N_USES, considering that pseudo REG is live. */
4077static void
4078count_pseudo (n_uses, reg)
4079 struct hard_reg_n_uses *n_uses;
4080 int reg;
4081{
4082 int r = reg_renumber[reg];
4083 int nregs;
4084
4085 if (REGNO_REG_SET_P (pseudos_counted, reg))
4086 return;
4087 SET_REGNO_REG_SET (pseudos_counted, reg);
4088
4089 if (r < 0)
4090 abort ();
4091
4092 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
4093 while (nregs-- > 0)
05d10675 4094 n_uses[r++].uses += REG_N_REFS (reg);
03acd8f8 4095}
32131a9c
RK
4096/* Choose the order to consider regs for use as reload registers
4097 based on how much trouble would be caused by spilling one.
4098 Store them in order of decreasing preference in potential_reload_regs. */
4099
4100static void
03acd8f8
BS
4101order_regs_for_reload (chain)
4102 struct insn_chain *chain;
32131a9c 4103{
03acd8f8 4104 register int i;
32131a9c 4105 register int o = 0;
32131a9c
RK
4106 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
4107
03acd8f8 4108 pseudos_counted = ALLOCA_REG_SET ();
32131a9c 4109
03acd8f8 4110 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
32131a9c
RK
4111
4112 /* Count number of uses of each hard reg by pseudo regs allocated to it
4113 and then order them by decreasing use. */
4114
4115 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4116 {
03acd8f8
BS
4117 int j;
4118
32131a9c 4119 hard_reg_n_uses[i].regno = i;
03acd8f8 4120 hard_reg_n_uses[i].uses = 0;
32131a9c 4121
03acd8f8
BS
4122 /* Test the various reasons why we can't use a register for
4123 spilling in this insn. */
4124 if (fixed_regs[i]
4125 || REGNO_REG_SET_P (chain->live_before, i)
4126 || REGNO_REG_SET_P (chain->live_after, i))
32131a9c 4127 {
32131a9c 4128 SET_HARD_REG_BIT (bad_spill_regs, i);
03acd8f8 4129 continue;
32131a9c 4130 }
32131a9c 4131
03acd8f8
BS
4132 /* Now find out which pseudos are allocated to it, and update
4133 hard_reg_n_uses. */
4134 CLEAR_REG_SET (pseudos_counted);
4135
4136 EXECUTE_IF_SET_IN_REG_SET
4137 (chain->live_before, FIRST_PSEUDO_REGISTER, j,
4138 {
4139 count_pseudo (hard_reg_n_uses, j);
4140 });
4141 EXECUTE_IF_SET_IN_REG_SET
4142 (chain->live_after, FIRST_PSEUDO_REGISTER, j,
4143 {
4144 count_pseudo (hard_reg_n_uses, j);
4145 });
32131a9c 4146 }
03acd8f8
BS
4147
4148 FREE_REG_SET (pseudos_counted);
32131a9c
RK
4149
4150 /* Prefer registers not so far used, for use in temporary loading.
4151 Among them, if REG_ALLOC_ORDER is defined, use that order.
4152 Otherwise, prefer registers not preserved by calls. */
4153
4154#ifdef REG_ALLOC_ORDER
4155 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4156 {
4157 int regno = reg_alloc_order[i];
4158
03acd8f8
BS
4159 if (hard_reg_n_uses[regno].uses == 0
4160 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
32131a9c
RK
4161 potential_reload_regs[o++] = regno;
4162 }
4163#else
4164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4165 {
03acd8f8
BS
4166 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
4167 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
32131a9c
RK
4168 potential_reload_regs[o++] = i;
4169 }
4170 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4171 {
03acd8f8
BS
4172 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
4173 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
32131a9c
RK
4174 potential_reload_regs[o++] = i;
4175 }
4176#endif
4177
4178 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4179 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4180
4181 /* Now add the regs that are already used,
4182 preferring those used less often. The fixed and otherwise forbidden
4183 registers will be at the end of this list. */
4184
4185 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
03acd8f8
BS
4186 if (hard_reg_n_uses[i].uses != 0
4187 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4188 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4189 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4190 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
32131a9c
RK
4191 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4192}
4193\f
4194/* Reload pseudo-registers into hard regs around each insn as needed.
4195 Additional register load insns are output before the insn that needs it
4196 and perhaps store insns after insns that modify the reloaded pseudo reg.
4197
4198 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 4199 which registers are already available in reload registers.
32131a9c
RK
4200 We update these for the reloads that we perform,
4201 as the insns are scanned. */
4202
4203static void
7609e720 4204reload_as_needed (live_known)
32131a9c
RK
4205 int live_known;
4206{
7609e720 4207 struct insn_chain *chain;
973838fd 4208#if defined (AUTO_INC_DEC) || defined (INSN_CLOBBERS_REGNO_P)
32131a9c 4209 register int i;
973838fd 4210#endif
32131a9c 4211 rtx x;
32131a9c 4212
4c9a05bc
RK
4213 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4214 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 4215 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 4216 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c 4217 reg_has_output_reload = (char *) alloca (max_regno);
e6e52be0 4218 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c 4219
1f3b1e1a 4220 set_initial_elim_offsets ();
32131a9c 4221
7609e720 4222 for (chain = reload_insn_chain; chain; chain = chain->next)
32131a9c 4223 {
03acd8f8 4224 rtx prev;
7609e720
BS
4225 rtx insn = chain->insn;
4226 rtx old_next = NEXT_INSN (insn);
32131a9c
RK
4227
4228 /* If we pass a label, copy the offsets from the label information
4229 into the current offsets of each elimination. */
4230 if (GET_CODE (insn) == CODE_LABEL)
1f3b1e1a 4231 set_offsets_for_label (insn);
32131a9c
RK
4232
4233 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4234 {
0639444f 4235 rtx oldpat = PATTERN (insn);
32131a9c 4236
2758481d
RS
 4237 /* If this is a USE or CLOBBER of a MEM, ensure that any
4238 references to eliminable registers have been removed. */
4239
4240 if ((GET_CODE (PATTERN (insn)) == USE
4241 || GET_CODE (PATTERN (insn)) == CLOBBER)
4242 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4243 XEXP (XEXP (PATTERN (insn), 0), 0)
4244 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
29ae5012 4245 GET_MODE (XEXP (PATTERN (insn), 0)),
1914f5da 4246 NULL_RTX);
2758481d 4247
32131a9c
RK
4248 /* If we need to do register elimination processing, do so.
4249 This might delete the insn, in which case we are done. */
2b49ee39 4250 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
32131a9c
RK
4251 {
4252 eliminate_regs_in_insn (insn, 1);
4253 if (GET_CODE (insn) == NOTE)
cb2afeb3
R
4254 {
4255 update_eliminable_offsets ();
4256 continue;
4257 }
32131a9c
RK
4258 }
4259
7609e720
BS
4260 /* If need_elim is nonzero but need_reload is zero, one might think
4261 that we could simply set n_reloads to 0. However, find_reloads
4262 could have done some manipulation of the insn (such as swapping
4263 commutative operands), and these manipulations are lost during
4264 the first pass for every insn that needs register elimination.
4265 So the actions of find_reloads must be redone here. */
4266
03acd8f8
BS
4267 if (! chain->need_elim && ! chain->need_reload
4268 && ! chain->need_operand_change)
32131a9c
RK
4269 n_reloads = 0;
4270 /* First find the pseudo regs that must be reloaded for this insn.
4271 This info is returned in the tables reload_... (see reload.h).
4272 Also modify the body of INSN by substituting RELOAD
4273 rtx's for those pseudo regs. */
4274 else
4275 {
4276 bzero (reg_has_output_reload, max_regno);
4277 CLEAR_HARD_REG_SET (reg_is_output_reload);
4278
4279 find_reloads (insn, 1, spill_indirect_levels, live_known,
4280 spill_reg_order);
4281 }
4282
dd6acd1b 4283 if (num_eliminable && chain->need_elim)
cb2afeb3
R
4284 update_eliminable_offsets ();
4285
32131a9c
RK
4286 if (n_reloads > 0)
4287 {
cb2afeb3 4288 rtx next = NEXT_INSN (insn);
3c3eeea6 4289 rtx p;
32131a9c 4290
cb2afeb3
R
4291 prev = PREV_INSN (insn);
4292
32131a9c
RK
4293 /* Now compute which reload regs to reload them into. Perhaps
4294 reusing reload regs from previous insns, or else output
4295 load insns to reload them. Maybe output store insns too.
4296 Record the choices of reload reg in reload_reg_rtx. */
03acd8f8 4297 choose_reload_regs (chain);
32131a9c 4298
05d10675 4299 /* Merge any reloads that we didn't combine for fear of
546b63fb
RK
4300 increasing the number of spill registers needed but now
4301 discover can be safely merged. */
f95182a4
ILT
4302 if (SMALL_REGISTER_CLASSES)
4303 merge_assigned_reloads (insn);
546b63fb 4304
32131a9c
RK
4305 /* Generate the insns to reload operands into or out of
4306 their reload regs. */
7609e720 4307 emit_reload_insns (chain);
32131a9c
RK
4308
4309 /* Substitute the chosen reload regs from reload_reg_rtx
4310 into the insn's body (or perhaps into the bodies of other
4311 load and store insn that we just made for reloading
4312 and that we moved the structure into). */
4313 subst_reloads ();
3c3eeea6
RK
4314
4315 /* If this was an ASM, make sure that all the reload insns
4316 we have generated are valid. If not, give an error
4317 and delete them. */
4318
4319 if (asm_noperands (PATTERN (insn)) >= 0)
4320 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4321 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4322 && (recog_memoized (p) < 0
0eadeb15 4323 || (extract_insn (p), ! constrain_operands (1))))
3c3eeea6
RK
4324 {
4325 error_for_asm (insn,
4326 "`asm' operand requires impossible reload");
4327 PUT_CODE (p, NOTE);
4328 NOTE_SOURCE_FILE (p) = 0;
4329 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4330 }
32131a9c
RK
4331 }
4332 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4333 is no longer validly lying around to save a future reload.
4334 Note that this does not detect pseudos that were reloaded
4335 for this insn in order to be stored in
4336 (obeying register constraints). That is correct; such reload
4337 registers ARE still valid. */
0639444f 4338 note_stores (oldpat, forget_old_reloads_1);
32131a9c
RK
4339
4340 /* There may have been CLOBBER insns placed after INSN. So scan
4341 between INSN and NEXT and use them to forget old reloads. */
7609e720 4342 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
32131a9c
RK
4343 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4344 note_stores (PATTERN (x), forget_old_reloads_1);
4345
4346#ifdef AUTO_INC_DEC
cb2afeb3
R
4347 /* Likewise for regs altered by auto-increment in this insn.
4348 REG_INC notes have been changed by reloading:
4349 find_reloads_address_1 records substitutions for them,
4350 which have been performed by subst_reloads above. */
4351 for (i = n_reloads - 1; i >= 0; i--)
4352 {
4353 rtx in_reg = reload_in_reg[i];
4354 if (in_reg)
4355 {
4356 enum rtx_code code = GET_CODE (in_reg);
4357 /* PRE_INC / PRE_DEC will have the reload register ending up
4358 with the same value as the stack slot, but that doesn't
4359 hold true for POST_INC / POST_DEC. Either we have to
4360 convert the memory access to a true POST_INC / POST_DEC,
4361 or we can't use the reload register for inheritance. */
4362 if ((code == POST_INC || code == POST_DEC)
4363 && TEST_HARD_REG_BIT (reg_reloaded_valid,
04bbb0c5
JW
4364 REGNO (reload_reg_rtx[i]))
4365 /* Make sure it is the inc/dec pseudo, and not
4366 some other (e.g. output operand) pseudo. */
4367 && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
4368 == REGNO (XEXP (in_reg, 0))))
05d10675 4369
cb2afeb3
R
4370 {
4371 rtx reload_reg = reload_reg_rtx[i];
4372 enum machine_mode mode = GET_MODE (reload_reg);
4373 int n = 0;
4374 rtx p;
4375
4376 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4377 {
4378 /* We really want to ignore REG_INC notes here, so
 4379 use PATTERN (p) as argument to reg_set_p. */
4380 if (reg_set_p (reload_reg, PATTERN (p)))
4381 break;
4382 n = count_occurrences (PATTERN (p), reload_reg);
4383 if (! n)
4384 continue;
4385 if (n == 1)
f67c2384
JL
4386 {
4387 n = validate_replace_rtx (reload_reg,
4388 gen_rtx (code, mode,
4389 reload_reg),
4390 p);
4391
4392 /* We must also verify that the constraints
4393 are met after the replacement. */
4394 extract_insn (p);
4395 if (n)
4396 n = constrain_operands (1);
4397 else
4398 break;
4399
4400 /* If the constraints were not met, then
4401 undo the replacement. */
4402 if (!n)
4403 {
4404 validate_replace_rtx (gen_rtx (code, mode,
4405 reload_reg),
4406 reload_reg, p);
4407 break;
4408 }
05d10675 4409
f67c2384 4410 }
cb2afeb3
R
4411 break;
4412 }
4413 if (n == 1)
02eb1393
R
4414 {
4415 REG_NOTES (p)
4416 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4417 REG_NOTES (p));
4418 /* Mark this as having an output reload so that the
4419 REG_INC processing code below won't invalidate
4420 the reload for inheritance. */
4421 SET_HARD_REG_BIT (reg_is_output_reload,
4422 REGNO (reload_reg));
4423 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4424 }
cb2afeb3
R
4425 else
4426 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
4427 }
02eb1393
R
4428 else if ((code == PRE_INC || code == PRE_DEC)
4429 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4430 REGNO (reload_reg_rtx[i]))
4431 /* Make sure it is the inc/dec pseudo, and not
4432 some other (e.g. output operand) pseudo. */
4433 && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
4434 == REGNO (XEXP (in_reg, 0))))
4435 {
4436 SET_HARD_REG_BIT (reg_is_output_reload,
4437 REGNO (reload_reg_rtx[i]));
4438 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4439 }
cb2afeb3
R
4440 }
4441 }
02eb1393
R
4442 /* If a pseudo that got a hard register is auto-incremented,
4443 we must purge records of copying it into pseudos without
4444 hard registers. */
32131a9c
RK
4445 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4446 if (REG_NOTE_KIND (x) == REG_INC)
4447 {
4448 /* See if this pseudo reg was reloaded in this insn.
4449 If so, its last-reload info is still valid
4450 because it is based on this insn's reload. */
4451 for (i = 0; i < n_reloads; i++)
4452 if (reload_out[i] == XEXP (x, 0))
4453 break;
4454
08fb99fa 4455 if (i == n_reloads)
9a881562 4456 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
4457 }
4458#endif
4459 }
4460 /* A reload reg's contents are unknown after a label. */
4461 if (GET_CODE (insn) == CODE_LABEL)
e6e52be0 4462 CLEAR_HARD_REG_SET (reg_reloaded_valid);
32131a9c
RK
4463
4464 /* Don't assume a reload reg is still good after a call insn
4465 if it is a call-used reg. */
546b63fb 4466 else if (GET_CODE (insn) == CALL_INSN)
e6e52be0 4467 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
32131a9c
RK
4468
4469 /* In case registers overlap, allow certain insns to invalidate
4470 particular hard registers. */
4471
4472#ifdef INSN_CLOBBERS_REGNO_P
e6e52be0
R
4473 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4474 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4475 && INSN_CLOBBERS_REGNO_P (insn, i))
4476 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
32131a9c
RK
4477#endif
4478
32131a9c
RK
4479#ifdef USE_C_ALLOCA
4480 alloca (0);
4481#endif
4482 }
4483}
4484
4485/* Discard all record of any value reloaded from X,
4486 or reloaded in X from someplace else;
4487 unless X is an output reload reg of the current insn.
4488
4489 X may be a hard reg (the reload reg)
4490 or it may be a pseudo reg that was reloaded from. */
4491
4492static void
9a881562 4493forget_old_reloads_1 (x, ignored)
32131a9c 4494 rtx x;
487a6e06 4495 rtx ignored ATTRIBUTE_UNUSED;
32131a9c
RK
4496{
4497 register int regno;
4498 int nr;
0a2e51a9
RS
4499 int offset = 0;
4500
4501 /* note_stores does give us subregs of hard regs. */
4502 while (GET_CODE (x) == SUBREG)
4503 {
4504 offset += SUBREG_WORD (x);
4505 x = SUBREG_REG (x);
4506 }
32131a9c
RK
4507
4508 if (GET_CODE (x) != REG)
4509 return;
4510
0a2e51a9 4511 regno = REGNO (x) + offset;
32131a9c
RK
4512
4513 if (regno >= FIRST_PSEUDO_REGISTER)
4514 nr = 1;
4515 else
4516 {
4517 int i;
4518 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4519 /* Storing into a spilled-reg invalidates its contents.
4520 This can happen if a block-local pseudo is allocated to that reg
4521 and it wasn't spilled because this block's total need is 0.
4522 Then some insn might have an optional reload and use this reg. */
4523 for (i = 0; i < nr; i++)
e6e52be0
R
4524 /* But don't do this if the reg actually serves as an output
4525 reload reg in the current instruction. */
4526 if (n_reloads == 0
4527 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4528 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
32131a9c
RK
4529 }
4530
4531 /* Since value of X has changed,
4532 forget any value previously copied from it. */
4533
4534 while (nr-- > 0)
4535 /* But don't forget a copy if this is the output reload
4536 that establishes the copy's validity. */
4537 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4538 reg_last_reload_reg[regno + nr] = 0;
4539}
4540\f
4541/* For each reload, the mode of the reload register. */
4542static enum machine_mode reload_mode[MAX_RELOADS];
4543
4544/* For each reload, the largest number of registers it will require. */
4545static int reload_nregs[MAX_RELOADS];
4546
4547/* Comparison function for qsort to decide which of two reloads
4548 should be handled first. *P1 and *P2 are the reload numbers. */
4549
4550static int
788a0818
RK
4551reload_reg_class_lower (r1p, r2p)
4552 const GENERIC_PTR r1p;
4553 const GENERIC_PTR r2p;
32131a9c 4554{
788a0818 4555 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
32131a9c 4556 register int t;
a8fdc208 4557
32131a9c
RK
4558 /* Consider required reloads before optional ones. */
4559 t = reload_optional[r1] - reload_optional[r2];
4560 if (t != 0)
4561 return t;
4562
4563 /* Count all solitary classes before non-solitary ones. */
4564 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4565 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4566 if (t != 0)
4567 return t;
4568
4569 /* Aside from solitaires, consider all multi-reg groups first. */
4570 t = reload_nregs[r2] - reload_nregs[r1];
4571 if (t != 0)
4572 return t;
4573
4574 /* Consider reloads in order of increasing reg-class number. */
4575 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4576 if (t != 0)
4577 return t;
4578
4579 /* If reloads are equally urgent, sort by reload number,
4580 so that the results of qsort leave nothing to chance. */
4581 return r1 - r2;
4582}
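
/* An illustrative ordering for reload_reg_class_lower above (the
   reloads are hypothetical): among required reloads, one whose class
   contains exactly one register sorts first; after the solitary
   classes, a two-register DImode group reload sorts ahead of a
   single-register SImode reload in the same class; optional reloads
   sort after all required ones regardless of class; and the final
   comparison on reload number makes the qsort result deterministic.  */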
4583\f
4584/* The following HARD_REG_SETs indicate when each hard register is
4585 used for a reload of various parts of the current insn. */
4586
4587/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4588static HARD_REG_SET reload_reg_used;
546b63fb
RK
4589/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4590static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4591/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4592static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4593/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4594static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91
ILT
4595/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4596static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
4597/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4598static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4599/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4600static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4601/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4602static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4603/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4604static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4605/* If reg is in use for a RELOAD_FOR_INSN reload. */
4606static HARD_REG_SET reload_reg_used_in_insn;
4607/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4608static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4609
4610/* If reg is in use as a reload reg for any sort of reload. */
4611static HARD_REG_SET reload_reg_used_at_all;
4612
be7ae2a4
RK
4613 /* If reg is in use as an inherited reload.  We just mark the first register
4614 in the group. */
4615static HARD_REG_SET reload_reg_used_for_inherit;
4616
f1db3576
JL
4617/* Records which hard regs are used in any way, either as explicit use or
4618 by being allocated to a pseudo during any point of the current insn. */
4619static HARD_REG_SET reg_used_in_insn;
297927a8 4620
546b63fb
RK
4621/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4622 TYPE. MODE is used to indicate how many consecutive regs are
4623 actually used. */
32131a9c
RK
4624
4625static void
546b63fb 4626mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4627 int regno;
546b63fb
RK
4628 int opnum;
4629 enum reload_type type;
32131a9c
RK
4630 enum machine_mode mode;
4631{
4632 int nregs = HARD_REGNO_NREGS (regno, mode);
4633 int i;
4634
4635 for (i = regno; i < nregs + regno; i++)
4636 {
546b63fb 4637 switch (type)
32131a9c
RK
4638 {
4639 case RELOAD_OTHER:
4640 SET_HARD_REG_BIT (reload_reg_used, i);
4641 break;
4642
546b63fb
RK
4643 case RELOAD_FOR_INPUT_ADDRESS:
4644 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4645 break;
4646
47c8cf91
ILT
4647 case RELOAD_FOR_INPADDR_ADDRESS:
4648 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4649 break;
4650
546b63fb
RK
4651 case RELOAD_FOR_OUTPUT_ADDRESS:
4652 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4653 break;
4654
47c8cf91
ILT
4655 case RELOAD_FOR_OUTADDR_ADDRESS:
4656 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4657 break;
4658
32131a9c
RK
4659 case RELOAD_FOR_OPERAND_ADDRESS:
4660 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4661 break;
4662
893bc853
RK
4663 case RELOAD_FOR_OPADDR_ADDR:
4664 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4665 break;
4666
546b63fb
RK
4667 case RELOAD_FOR_OTHER_ADDRESS:
4668 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4669 break;
4670
32131a9c 4671 case RELOAD_FOR_INPUT:
546b63fb 4672 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4673 break;
4674
4675 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4676 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4677 break;
4678
4679 case RELOAD_FOR_INSN:
4680 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4681 break;
4682 }
4683
4684 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4685 }
4686}
4687
be7ae2a4
RK
4688/* Similarly, but show REGNO is no longer in use for a reload. */
4689
4690static void
4691clear_reload_reg_in_use (regno, opnum, type, mode)
4692 int regno;
4693 int opnum;
4694 enum reload_type type;
4695 enum machine_mode mode;
4696{
4697 int nregs = HARD_REGNO_NREGS (regno, mode);
cb2afeb3 4698 int start_regno, end_regno;
be7ae2a4 4699 int i;
cb2afeb3
R
4700 /* A complication is that for some reload types, inheritance might
4701     allow multiple reloads of the same type to share a reload register.
4702 We set check_opnum if we have to check only reloads with the same
4703 operand number, and check_any if we have to check all reloads. */
4704 int check_opnum = 0;
4705 int check_any = 0;
4706 HARD_REG_SET *used_in_set;
be7ae2a4 4707
cb2afeb3 4708 switch (type)
be7ae2a4 4709 {
cb2afeb3
R
4710 case RELOAD_OTHER:
4711 used_in_set = &reload_reg_used;
4712 break;
be7ae2a4 4713
cb2afeb3
R
4714 case RELOAD_FOR_INPUT_ADDRESS:
4715 used_in_set = &reload_reg_used_in_input_addr[opnum];
4716 break;
be7ae2a4 4717
cb2afeb3
R
4718 case RELOAD_FOR_INPADDR_ADDRESS:
4719 check_opnum = 1;
4720 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4721 break;
47c8cf91 4722
cb2afeb3
R
4723 case RELOAD_FOR_OUTPUT_ADDRESS:
4724 used_in_set = &reload_reg_used_in_output_addr[opnum];
4725 break;
be7ae2a4 4726
cb2afeb3
R
4727 case RELOAD_FOR_OUTADDR_ADDRESS:
4728 check_opnum = 1;
4729 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4730 break;
47c8cf91 4731
cb2afeb3
R
4732 case RELOAD_FOR_OPERAND_ADDRESS:
4733 used_in_set = &reload_reg_used_in_op_addr;
4734 break;
be7ae2a4 4735
cb2afeb3
R
4736 case RELOAD_FOR_OPADDR_ADDR:
4737 check_any = 1;
4738 used_in_set = &reload_reg_used_in_op_addr_reload;
4739 break;
893bc853 4740
cb2afeb3
R
4741 case RELOAD_FOR_OTHER_ADDRESS:
4742 used_in_set = &reload_reg_used_in_other_addr;
4743 check_any = 1;
4744 break;
be7ae2a4 4745
cb2afeb3
R
4746 case RELOAD_FOR_INPUT:
4747 used_in_set = &reload_reg_used_in_input[opnum];
4748 break;
be7ae2a4 4749
cb2afeb3
R
4750 case RELOAD_FOR_OUTPUT:
4751 used_in_set = &reload_reg_used_in_output[opnum];
4752 break;
be7ae2a4 4753
cb2afeb3
R
4754 case RELOAD_FOR_INSN:
4755 used_in_set = &reload_reg_used_in_insn;
4756 break;
4757 default:
4758 abort ();
4759 }
4760 /* We resolve conflicts with remaining reloads of the same type by
4761     excluding the intervals of reload registers used by them from the
4762 interval of freed reload registers. Since we only keep track of
4763 one set of interval bounds, we might have to exclude somewhat
4764     more than what would be necessary if we used a HARD_REG_SET here.
4765 But this should only happen very infrequently, so there should
4766 be no reason to worry about it. */
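  /* For example (illustrative register numbers): when freeing
     registers 4..7, a remaining reload of the same type that still
     claims registers 3..5 moves the interval start up to 6, so only
     registers 6 and 7 are cleared; one that claims registers 6..7
     instead moves the interval end down to 6, so only registers 4
     and 5 are cleared.  */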
05d10675 4767
cb2afeb3
R
4768 start_regno = regno;
4769 end_regno = regno + nregs;
4770 if (check_opnum || check_any)
4771 {
4772 for (i = n_reloads - 1; i >= 0; i--)
4773 {
4774 if (reload_when_needed[i] == type
4775 && (check_any || reload_opnum[i] == opnum)
4776 && reload_reg_rtx[i])
4777 {
4778 int conflict_start = true_regnum (reload_reg_rtx[i]);
4779 int conflict_end
4780 = (conflict_start
4781 + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));
4782
4783 /* If there is an overlap with the first to-be-freed register,
4784 adjust the interval start. */
4785 if (conflict_start <= start_regno && conflict_end > start_regno)
4786 start_regno = conflict_end;
4787 /* Otherwise, if there is a conflict with one of the other
4788 to-be-freed registers, adjust the interval end. */
4789 if (conflict_start > start_regno && conflict_start < end_regno)
4790 end_regno = conflict_start;
4791 }
be7ae2a4
RK
4792 }
4793 }
cb2afeb3
R
4794 for (i = start_regno; i < end_regno; i++)
4795 CLEAR_HARD_REG_BIT (*used_in_set, i);
be7ae2a4
RK
4796}
4797
32131a9c 4798/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4799 specified by OPNUM and TYPE. */
32131a9c
RK
4800
4801static int
546b63fb 4802reload_reg_free_p (regno, opnum, type)
32131a9c 4803 int regno;
546b63fb
RK
4804 int opnum;
4805 enum reload_type type;
32131a9c 4806{
546b63fb
RK
4807 int i;
4808
2edc8d65
RK
4809 /* In use for a RELOAD_OTHER means it's not available for anything. */
4810 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4811 return 0;
546b63fb
RK
4812
4813 switch (type)
32131a9c
RK
4814 {
4815 case RELOAD_OTHER:
2edc8d65
RK
4816 /* In use for anything means we can't use it for RELOAD_OTHER. */
4817 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
224f1d71
RK
4818 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4819 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4820 return 0;
4821
4822 for (i = 0; i < reload_n_operands; i++)
4823 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4824 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
224f1d71 4825 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4826 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
224f1d71
RK
4827 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4829 return 0;
4830
4831 return 1;
32131a9c 4832
32131a9c 4833 case RELOAD_FOR_INPUT:
546b63fb
RK
4834 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4835 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4836 return 0;
4837
893bc853
RK
4838 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4839 return 0;
4840
546b63fb
RK
4841 /* If it is used for some other input, can't use it. */
4842 for (i = 0; i < reload_n_operands; i++)
4843 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4844 return 0;
4845
4846 /* If it is used in a later operand's address, can't use it. */
4847 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
4848 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4849 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
4850 return 0;
4851
4852 return 1;
4853
4854 case RELOAD_FOR_INPUT_ADDRESS:
4855 /* Can't use a register if it is used for an input address for this
4856 operand or used as an input in an earlier one. */
47c8cf91
ILT
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4858 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4859 return 0;
4860
4861 for (i = 0; i < opnum; i++)
4862 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4863 return 0;
4864
4865 return 1;
4866
4867 case RELOAD_FOR_INPADDR_ADDRESS:
4868 /* Can't use a register if it is used for an input address
05d10675
BS
4869 for this operand or used as an input in an earlier
4870 one. */
47c8cf91 4871 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
546b63fb
RK
4872 return 0;
4873
4874 for (i = 0; i < opnum; i++)
4875 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4876 return 0;
4877
4878 return 1;
4879
4880 case RELOAD_FOR_OUTPUT_ADDRESS:
4881 /* Can't use a register if it is used for an output address for this
4882 operand or used as an output in this or a later operand. */
4883 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4884 return 0;
4885
4886 for (i = opnum; i < reload_n_operands; i++)
4887 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4888 return 0;
4889
4890 return 1;
4891
47c8cf91
ILT
4892 case RELOAD_FOR_OUTADDR_ADDRESS:
4893 /* Can't use a register if it is used for an output address
05d10675
BS
4894 for this operand or used as an output in this or a
4895 later operand. */
47c8cf91
ILT
4896 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4897 return 0;
4898
4899 for (i = opnum; i < reload_n_operands; i++)
4900 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4901 return 0;
4902
4903 return 1;
4904
32131a9c 4905 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4906 for (i = 0; i < reload_n_operands; i++)
4907 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4908 return 0;
4909
4910 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4911 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4912
893bc853
RK
4913 case RELOAD_FOR_OPADDR_ADDR:
4914 for (i = 0; i < reload_n_operands; i++)
05d10675
BS
4915 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4916 return 0;
893bc853 4917
a94ce333 4918 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
893bc853 4919
32131a9c 4920 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4921 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4922 outputs, or an operand address for this or an earlier output. */
4923 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4924 return 0;
4925
4926 for (i = 0; i < reload_n_operands; i++)
4927 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4928 return 0;
4929
4930 for (i = 0; i <= opnum; i++)
47c8cf91
ILT
4931 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4932 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
4933 return 0;
4934
4935 return 1;
4936
4937 case RELOAD_FOR_INSN:
4938 for (i = 0; i < reload_n_operands; i++)
4939 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4940 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4941 return 0;
4942
4943 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4944 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4945
4946 case RELOAD_FOR_OTHER_ADDRESS:
4947 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4948 }
4949 abort ();
4950}
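
/* An illustrative consequence of reload_reg_free_p above (the operand
   numbers are hypothetical): a register already recorded for a
   RELOAD_FOR_INPUT reload of operand 1 is rejected for a
   RELOAD_FOR_INPUT reload of operand 0, but is still reported free
   for a RELOAD_FOR_OUTPUT_ADDRESS reload of operand 0, since input
   reload registers are dead by the time output addresses are
   computed.  */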
4951
32131a9c 4952/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4953 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4954 is still available in REGNO at the end of the insn.
4955
4956 We can assume that the reload reg was already tested for availability
4957 at the time it is needed, and we should not check this again,
4958 in case the reg has already been marked in use. */
4959
4960static int
546b63fb 4961reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4962 int regno;
546b63fb
RK
4963 int opnum;
4964 enum reload_type type;
32131a9c 4965{
546b63fb
RK
4966 int i;
4967
4968 switch (type)
32131a9c
RK
4969 {
4970 case RELOAD_OTHER:
4971 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4972 its value must reach the end. */
4973 return 1;
4974
4975 /* If this use is for part of the insn,
05d10675 4976	 its value reaches the end if no subsequent part uses the same register.
546b63fb
RK
4977 Just like the above function, don't try to do this with lots
4978 of fallthroughs. */
4979
4980 case RELOAD_FOR_OTHER_ADDRESS:
4981 /* Here we check for everything else, since these don't conflict
4982 with anything else and everything comes later. */
4983
4984 for (i = 0; i < reload_n_operands; i++)
4985 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 4986 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
4987 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4988 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 4989 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
4990 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4991 return 0;
4992
4993 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4994 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4995 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4996
4997 case RELOAD_FOR_INPUT_ADDRESS:
47c8cf91 4998 case RELOAD_FOR_INPADDR_ADDRESS:
546b63fb
RK
4999 /* Similar, except that we check only for this and subsequent inputs
5000 and the address of only subsequent inputs and we do not need
5001 to check for RELOAD_OTHER objects since they are known not to
5002 conflict. */
5003
5004 for (i = opnum; i < reload_n_operands; i++)
5005 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5006 return 0;
5007
5008 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
5009 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5010 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
546b63fb
RK
5011 return 0;
5012
5013 for (i = 0; i < reload_n_operands; i++)
5014 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5015 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
5016 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5017 return 0;
5018
893bc853
RK
5019 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5020 return 0;
5021
546b63fb
RK
5022 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5023 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5024
32131a9c 5025 case RELOAD_FOR_INPUT:
546b63fb 5026 /* Similar to input address, except we start at the next operand for
05d10675 5027 both input and input address and we do not check for
546b63fb
RK
5028 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5029 would conflict. */
5030
5031 for (i = opnum + 1; i < reload_n_operands; i++)
5032 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
47c8cf91 5033 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
546b63fb
RK
5034 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5035 return 0;
5036
0f41302f 5037 /* ... fall through ... */
546b63fb 5038
32131a9c 5039 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5040 /* Check outputs and their addresses. */
5041
5042 for (i = 0; i < reload_n_operands; i++)
5043 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5044 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
546b63fb
RK
5045 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5046 return 0;
5047
5048 return 1;
5049
893bc853
RK
5050 case RELOAD_FOR_OPADDR_ADDR:
5051 for (i = 0; i < reload_n_operands; i++)
5052 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
47c8cf91 5053 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
893bc853
RK
5054 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5055 return 0;
5056
a94ce333
JW
5057 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5058 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
893bc853 5059
546b63fb 5060 case RELOAD_FOR_INSN:
893bc853 5061 /* These conflict with other outputs with RELOAD_OTHER. So
546b63fb
RK
5062 we need only check for output addresses. */
5063
5064 opnum = -1;
5065
0f41302f 5066 /* ... fall through ... */
546b63fb 5067
32131a9c 5068 case RELOAD_FOR_OUTPUT:
546b63fb 5069 case RELOAD_FOR_OUTPUT_ADDRESS:
47c8cf91 5070 case RELOAD_FOR_OUTADDR_ADDRESS:
546b63fb
RK
5071 /* We already know these can't conflict with a later output. So the
5072 only thing to check are later output addresses. */
5073 for (i = opnum + 1; i < reload_n_operands; i++)
47c8cf91
ILT
5074 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5075 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
546b63fb
RK
5076 return 0;
5077
32131a9c
RK
5078 return 1;
5079 }
546b63fb 5080
32131a9c
RK
5081 abort ();
5082}
5083\f
351aa1c1
RK
5084/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5085 Return 0 otherwise.
5086
5087 This function uses the same algorithm as reload_reg_free_p above. */
5088
f5963e61 5089int
351aa1c1
RK
5090reloads_conflict (r1, r2)
5091 int r1, r2;
5092{
5093 enum reload_type r1_type = reload_when_needed[r1];
5094 enum reload_type r2_type = reload_when_needed[r2];
5095 int r1_opnum = reload_opnum[r1];
5096 int r2_opnum = reload_opnum[r2];
5097
2edc8d65
RK
5098 /* RELOAD_OTHER conflicts with everything. */
5099 if (r2_type == RELOAD_OTHER)
351aa1c1
RK
5100 return 1;
5101
5102 /* Otherwise, check conflicts differently for each type. */
5103
5104 switch (r1_type)
5105 {
5106 case RELOAD_FOR_INPUT:
05d10675 5107 return (r2_type == RELOAD_FOR_INSN
351aa1c1 5108 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 5109 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1 5110 || r2_type == RELOAD_FOR_INPUT
47c8cf91
ILT
5111 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5112 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5113 && r2_opnum > r1_opnum));
351aa1c1
RK
5114
5115 case RELOAD_FOR_INPUT_ADDRESS:
5116 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5117 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5118
47c8cf91
ILT
5119 case RELOAD_FOR_INPADDR_ADDRESS:
5120 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5121 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5122
351aa1c1
RK
5123 case RELOAD_FOR_OUTPUT_ADDRESS:
5124 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5125 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5126
47c8cf91
ILT
5127 case RELOAD_FOR_OUTADDR_ADDRESS:
5128 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5129 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5130
351aa1c1
RK
5131 case RELOAD_FOR_OPERAND_ADDRESS:
5132 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
a94ce333 5133 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
351aa1c1 5134
893bc853 5135 case RELOAD_FOR_OPADDR_ADDR:
05d10675 5136 return (r2_type == RELOAD_FOR_INPUT
a94ce333 5137 || r2_type == RELOAD_FOR_OPADDR_ADDR);
893bc853 5138
351aa1c1
RK
5139 case RELOAD_FOR_OUTPUT:
5140 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
47c8cf91
ILT
5141 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5142 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
351aa1c1
RK
5143 && r2_opnum >= r1_opnum));
5144
5145 case RELOAD_FOR_INSN:
5146 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5147 || r2_type == RELOAD_FOR_INSN
5148 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5149
5150 case RELOAD_FOR_OTHER_ADDRESS:
5151 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5152
adab4fc5 5153 case RELOAD_OTHER:
2edc8d65 5154 return 1;
adab4fc5 5155
351aa1c1
RK
5156 default:
5157 abort ();
5158 }
5159}
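
/* An illustrative use of reloads_conflict above (the operand numbers
   are hypothetical): a RELOAD_FOR_INPUT reload for operand 0 conflicts
   with a RELOAD_FOR_INPUT_ADDRESS reload for operand 1, whose address
   is computed while the earlier input's reload register is still live,
   but not with a RELOAD_FOR_INPUT_ADDRESS reload for operand 0 itself;
   and every reload conflicts with a RELOAD_OTHER reload.  */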
5160\f
32131a9c
RK
5161/* Vector of reload-numbers showing the order in which the reloads should
5162 be processed. */
5163short reload_order[MAX_RELOADS];
5164
5165/* Indexed by reload number, 1 if incoming value
5166 inherited from previous insns. */
5167char reload_inherited[MAX_RELOADS];
5168
5169/* For an inherited reload, this is the insn the reload was inherited from,
5170 if we know it. Otherwise, this is 0. */
5171rtx reload_inheritance_insn[MAX_RELOADS];
5172
5173/* If non-zero, this is a place to get the value of the reload,
5174 rather than using reload_in. */
5175rtx reload_override_in[MAX_RELOADS];
5176
e6e52be0
R
5177/* For each reload, the hard register number of the register used,
5178 or -1 if we did not need a register for this reload. */
32131a9c
RK
5179int reload_spill_index[MAX_RELOADS];
5180
6e684430
R
5181/* Return 1 if the value in reload reg REGNO, as used by a reload
5182 needed for the part of the insn specified by OPNUM and TYPE,
5183 may be used to load VALUE into it.
f5470689
R
5184
5185 Other read-only reloads with the same value do not conflict
5186 unless OUT is non-zero and these other reloads have to live while
5187 output reloads live.
dfe96118
R
5188 If OUT is CONST0_RTX, this is a special case: it means that the
5189 test should not be for using register REGNO as reload register, but
5190 for copying from register REGNO into the reload register.
f5470689
R
5191
5192 RELOADNUM is the number of the reload we want to load this value for;
5193 a reload does not conflict with itself.
5194
dfe96118
R
5195 When IGNORE_ADDRESS_RELOADS is set, we can not have conflicts with
5196 reloads that load an address for the very reload we are considering.
5197
6e684430
R
5198 The caller has to make sure that there is no conflict with the return
5199 register. */
5200static int
dfe96118 5201reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum,
05d10675 5202 ignore_address_reloads)
6e684430
R
5203 int regno;
5204 int opnum;
5205 enum reload_type type;
f5470689
R
5206 rtx value, out;
5207 int reloadnum;
5828374f 5208 int ignore_address_reloads;
6e684430
R
5209{
5210 int time1;
5211 int i;
dfe96118
R
5212 int copy = 0;
5213
5214 if (out == const0_rtx)
5215 {
5216 copy = 1;
5217 out = NULL_RTX;
5218 }
6e684430
R
5219
5220 /* We use some pseudo 'time' value to check if the lifetimes of the
5221 new register use would overlap with the one of a previous reload
5222 that is not read-only or uses a different value.
5223 The 'time' used doesn't have to be linear in any shape or form, just
5224 monotonic.
5225 Some reload types use different 'buckets' for each operand.
5226 So there are MAX_RECOG_OPERANDS different time values for each
cecbf6e2
R
5227 such reload type.
5228 We compute TIME1 as the time when the register for the prospective
5229 new reload ceases to be live, and TIME2 for each existing
5230     reload as the time when the reload register of that reload
5231 becomes live.
5232 Where there is little to be gained by exact lifetime calculations,
5233 we just make conservative assumptions, i.e. a longer lifetime;
5234 this is done in the 'default:' cases. */
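  /* As an illustration of the layout this produces (the numbers are
     only relative): RELOAD_FOR_OTHER_ADDRESS gets time 0; operand N's
     RELOAD_FOR_INPADDR_ADDRESS and RELOAD_FOR_INPUT_ADDRESS reloads
     get times 4N + 2 and 4N + 3, and an existing RELOAD_FOR_INPUT
     reload for operand N becomes live at 4N + 4; reloads tied to the
     insn as a whole (operand address, insn and output reloads) get
     values of MAX_RECOG_OPERANDS * 4 + 1 and above, so they are
     ordered after every per-operand input value.  */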
6e684430
R
5235 switch (type)
5236 {
5237 case RELOAD_FOR_OTHER_ADDRESS:
5238 time1 = 0;
5239 break;
dfe96118
R
5240 case RELOAD_OTHER:
5241 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
5242 break;
05d10675
BS
5243 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5244 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
5245 respectively, to the time values for these, we get distinct time
5246 values. To get distinct time values for each operand, we have to
5247 multiply opnum by at least three. We round that up to four because
5248 multiply by four is often cheaper. */
6e684430 5249 case RELOAD_FOR_INPADDR_ADDRESS:
dfe96118 5250 time1 = opnum * 4 + 2;
6e684430
R
5251 break;
5252 case RELOAD_FOR_INPUT_ADDRESS:
dfe96118
R
5253 time1 = opnum * 4 + 3;
5254 break;
5255 case RELOAD_FOR_INPUT:
5256 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
5257 executes (inclusive). */
5258 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
6e684430 5259 break;
cb2afeb3 5260 case RELOAD_FOR_OPADDR_ADDR:
05d10675
BS
5261 /* opnum * 4 + 4
5262 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
cb2afeb3
R
5263 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5264 break;
5265 case RELOAD_FOR_OPERAND_ADDRESS:
5266 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5267 is executed. */
dfe96118
R
5268 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
5269 break;
5270 case RELOAD_FOR_OUTADDR_ADDRESS:
5271 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
6e684430 5272 break;
6e684430 5273 case RELOAD_FOR_OUTPUT_ADDRESS:
dfe96118 5274 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
6e684430
R
5275 break;
5276 default:
dfe96118 5277 time1 = MAX_RECOG_OPERANDS * 5 + 5;
6e684430
R
5278 }
5279
5280 for (i = 0; i < n_reloads; i++)
5281 {
5282 rtx reg = reload_reg_rtx[i];
5283 if (reg && GET_CODE (reg) == REG
5284 && ((unsigned) regno - true_regnum (reg)
83e0821b 5285 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
f5470689 5286 && i != reloadnum)
6e684430 5287 {
f5470689 5288 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
25963977 5289 || reload_out[i] || out)
6e684430 5290 {
f5470689
R
5291 int time2;
5292 switch (reload_when_needed[i])
5293 {
5294 case RELOAD_FOR_OTHER_ADDRESS:
5295 time2 = 0;
5296 break;
5297 case RELOAD_FOR_INPADDR_ADDRESS:
cb2afeb3
R
5298 /* find_reloads makes sure that a
5299 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5300 by at most one - the first -
5301 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5302 address reload is inherited, the address address reload
5303 goes away, so we can ignore this conflict. */
dfe96118
R
5304 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
5305 && ignore_address_reloads
5306 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
5307 Then the address address is still needed to store
5308 back the new address. */
5309 && ! reload_out[reloadnum])
cb2afeb3 5310 continue;
dfe96118
R
5311 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
5312 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
5313 reloads go away. */
5314 if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
5315 && ignore_address_reloads
5316 /* Unless we are reloading an auto_inc expression. */
5317 && ! reload_out[reloadnum])
5318 continue;
5319 time2 = reload_opnum[i] * 4 + 2;
f5470689
R
5320 break;
5321 case RELOAD_FOR_INPUT_ADDRESS:
dfe96118
R
5322 if (type == RELOAD_FOR_INPUT && opnum == reload_opnum[i]
5323 && ignore_address_reloads
5324 && ! reload_out[reloadnum])
5325 continue;
5326 time2 = reload_opnum[i] * 4 + 3;
f5470689
R
5327 break;
5328 case RELOAD_FOR_INPUT:
dfe96118 5329 time2 = reload_opnum[i] * 4 + 4;
f5470689 5330 break;
05d10675
BS
5331	    /* reload_opnum[i] * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
5332	       == MAX_RECOG_OPERANDS * 4 */
cb2afeb3 5333 case RELOAD_FOR_OPADDR_ADDR:
dfe96118
R
5334 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
5335 && ignore_address_reloads
5336 && ! reload_out[reloadnum])
cb2afeb3 5337 continue;
dfe96118 5338 time2 = MAX_RECOG_OPERANDS * 4 + 1;
cb2afeb3
R
5339 break;
5340 case RELOAD_FOR_OPERAND_ADDRESS:
dfe96118
R
5341 time2 = MAX_RECOG_OPERANDS * 4 + 2;
5342 break;
5343 case RELOAD_FOR_INSN:
5344 time2 = MAX_RECOG_OPERANDS * 4 + 3;
cb2afeb3 5345 break;
f5470689 5346 case RELOAD_FOR_OUTPUT:
05d10675
BS
5347 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5348 instruction is executed. */
dfe96118 5349 time2 = MAX_RECOG_OPERANDS * 4 + 4;
f5470689 5350 break;
05d10675
BS
5351 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
5352 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
5353 value. */
cb2afeb3 5354 case RELOAD_FOR_OUTADDR_ADDRESS:
dfe96118
R
5355 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
5356 && ignore_address_reloads
5357 && ! reload_out[reloadnum])
cb2afeb3 5358 continue;
dfe96118
R
5359 time2 = MAX_RECOG_OPERANDS * 4 + 4 + reload_opnum[i];
5360 break;
f5470689 5361 case RELOAD_FOR_OUTPUT_ADDRESS:
dfe96118 5362 time2 = MAX_RECOG_OPERANDS * 4 + 5 + reload_opnum[i];
f5470689
R
5363 break;
5364 case RELOAD_OTHER:
dfe96118
R
5365 /* If there is no conflict in the input part, handle this
5366 like an output reload. */
f5470689
R
5367 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5368 {
dfe96118 5369 time2 = MAX_RECOG_OPERANDS * 4 + 4;
f5470689
R
5370 break;
5371 }
dfe96118
R
5372 time2 = 1;
5373 /* RELOAD_OTHER might be live beyond instruction execution,
5374 but this is not obvious when we set time2 = 1. So check
5375 here if there might be a problem with the new reload
5376 clobbering the register used by the RELOAD_OTHER. */
5377 if (out)
5378 return 0;
5379 break;
f5470689 5380 default:
dfe96118 5381 return 0;
f5470689 5382 }
25963977
R
5383 if ((time1 >= time2
5384 && (! reload_in[i] || reload_out[i]
5385 || ! rtx_equal_p (reload_in[i], value)))
701d55e8
R
5386 || (out && reload_out_reg[reloadnum]
5387 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
f5470689 5388 return 0;
6e684430 5389 }
6e684430
R
5390 }
5391 }
5392 return 1;
5393}
5394
32131a9c
RK
5395/* Find a spill register to use as a reload register for reload R.
5396 LAST_RELOAD is non-zero if this is the last reload for the insn being
5397 processed.
5398
5399 Set reload_reg_rtx[R] to the register allocated.
5400
5401 If NOERROR is nonzero, we return 1 if successful,
5402 or 0 if we couldn't find a spill reg and we didn't change anything. */
5403
5404static int
7609e720
BS
5405allocate_reload_reg (chain, r, last_reload, noerror)
5406 struct insn_chain *chain;
32131a9c 5407 int r;
32131a9c
RK
5408 int last_reload;
5409 int noerror;
5410{
7609e720 5411 rtx insn = chain->insn;
03acd8f8 5412 int i, pass, count, regno;
32131a9c 5413 rtx new;
32131a9c
RK
5414
5415 /* If we put this reload ahead, thinking it is a group,
5416 then insist on finding a group. Otherwise we can grab a
a8fdc208 5417 reg that some other reload needs.
32131a9c
RK
5418 (That can happen when we have a 68000 DATA_OR_FP_REG
5419 which is a group of data regs or one fp reg.)
5420 We need not be so restrictive if there are no more reloads
5421 for this insn.
5422
5423 ??? Really it would be nicer to have smarter handling
5424 for that kind of reg class, where a problem like this is normal.
5425 Perhaps those classes should be avoided for reloading
5426 by use of more alternatives. */
5427
5428 int force_group = reload_nregs[r] > 1 && ! last_reload;
5429
5430 /* If we want a single register and haven't yet found one,
5431 take any reg in the right class and not in use.
5432 If we want a consecutive group, here is where we look for it.
5433
5434 We use two passes so we can first look for reload regs to
5435 reuse, which are already in use for other reloads in this insn,
5436 and only then use additional registers.
5437 I think that maximizing reuse is needed to make sure we don't
5438 run out of reload regs. Suppose we have three reloads, and
5439 reloads A and B can share regs. These need two regs.
5440 Suppose A and B are given different regs.
5441 That leaves none for C. */
5442 for (pass = 0; pass < 2; pass++)
5443 {
5444 /* I is the index in spill_regs.
5445 We advance it round-robin between insns to use all spill regs
5446 equally, so that inherited reloads have a chance
a5339699
RK
5447 of leapfrogging each other. Don't do this, however, when we have
5448 group needs and failure would be fatal; if we only have a relatively
5449 small number of spill registers, and more than one of them has
05d10675 5450 group needs, then by starting in the middle, we may end up
a5339699
RK
5451 allocating the first one in such a way that we are not left with
5452 sufficient groups to handle the rest. */
5453
5454 if (noerror || ! force_group)
5455 i = last_spill_reg;
5456 else
5457 i = -1;
05d10675 5458
a5339699 5459 for (count = 0; count < n_spills; count++)
32131a9c
RK
5460 {
5461 int class = (int) reload_reg_class[r];
03acd8f8 5462 int regnum;
32131a9c 5463
03acd8f8
BS
5464 i++;
5465 if (i >= n_spills)
5466 i -= n_spills;
5467 regnum = spill_regs[i];
32131a9c 5468
03acd8f8 5469 if ((reload_reg_free_p (regnum, reload_opnum[r],
6e684430 5470 reload_when_needed[r])
f5470689 5471 || (reload_in[r]
05d10675
BS
5472 /* We check reload_reg_used to make sure we
5473 don't clobber the return register. */
03acd8f8
BS
5474 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5475 && reload_reg_free_for_value_p (regnum,
05d10675
BS
5476 reload_opnum[r],
5477 reload_when_needed[r],
5478 reload_in[r],
5479 reload_out[r], r, 1)))
03acd8f8
BS
5480 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5481 && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
be7ae2a4
RK
5482 /* Look first for regs to share, then for unshared. But
5483 don't share regs used for inherited reloads; they are
5484 the ones we want to preserve. */
5485 && (pass
5486 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
03acd8f8 5487 regnum)
be7ae2a4 5488 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
03acd8f8 5489 regnum))))
32131a9c 5490 {
03acd8f8 5491 int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
32131a9c
RK
5492 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5493 (on 68000) got us two FP regs. If NR is 1,
5494 we would reject both of them. */
5495 if (force_group)
5496 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5497 /* If we need only one reg, we have already won. */
5498 if (nr == 1)
5499 {
5500 /* But reject a single reg if we demand a group. */
5501 if (force_group)
5502 continue;
5503 break;
5504 }
5505 /* Otherwise check that as many consecutive regs as we need
5506 are available here.
5507 Also, don't use for a group registers that are
5508 needed for nongroups. */
03acd8f8 5509 if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
32131a9c
RK
5510 while (nr > 1)
5511 {
03acd8f8 5512 regno = regnum + nr - 1;
32131a9c
RK
5513 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5514 && spill_reg_order[regno] >= 0
546b63fb
RK
5515 && reload_reg_free_p (regno, reload_opnum[r],
5516 reload_when_needed[r])
03acd8f8 5517 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
32131a9c
RK
5518 regno)))
5519 break;
5520 nr--;
5521 }
5522 if (nr == 1)
5523 break;
5524 }
5525 }
5526
5527 /* If we found something on pass 1, omit pass 2. */
5528 if (count < n_spills)
5529 break;
5530 }
5531
5532 /* We should have found a spill register by now. */
5533 if (count == n_spills)
5534 {
5535 if (noerror)
5536 return 0;
139fc12e 5537 goto failure;
32131a9c
RK
5538 }
5539
be7ae2a4
RK
5540 /* I is the index in SPILL_REG_RTX of the reload register we are to
5541 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
5542
5543 new = spill_reg_rtx[i];
5544
5545 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4 5546 spill_reg_rtx[i] = new
38a448ca 5547 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
05d10675 5548
32131a9c
RK
5549 regno = true_regnum (new);
5550
5551 /* Detect when the reload reg can't hold the reload mode.
5552 This used to be one `if', but Sequent compiler can't handle that. */
5553 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5554 {
5555 enum machine_mode test_mode = VOIDmode;
5556 if (reload_in[r])
5557 test_mode = GET_MODE (reload_in[r]);
5558 /* If reload_in[r] has VOIDmode, it means we will load it
5559 in whatever mode the reload reg has: to wit, reload_mode[r].
5560 We have already tested that for validity. */
5561 /* Aside from that, we need to test that the expressions
5562 to reload from or into have modes which are valid for this
5563 reload register. Otherwise the reload insns would be invalid. */
5564 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5565 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5566 if (! (reload_out[r] != 0
5567 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
5568 {
5569 /* The reg is OK. */
5570 last_spill_reg = i;
5571
5572 /* Mark as in use for this insn the reload regs we use
5573 for this. */
5574 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5575 reload_when_needed[r], reload_mode[r]);
5576
5577 reload_reg_rtx[r] = new;
e6e52be0 5578 reload_spill_index[r] = spill_regs[i];
be7ae2a4
RK
5579 return 1;
5580 }
32131a9c
RK
5581 }
5582
5583 /* The reg is not OK. */
5584 if (noerror)
5585 return 0;
5586
139fc12e 5587 failure:
32131a9c
RK
5588 if (asm_noperands (PATTERN (insn)) < 0)
5589 /* It's the compiler's fault. */
a89b2cc4 5590 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
5591
5592 /* It's the user's fault; the operand's mode and constraint
5593 don't match. Disable this reload so we don't crash in final. */
5594 error_for_asm (insn,
5595 "`asm' operand constraint incompatible with operand size");
5596 reload_in[r] = 0;
5597 reload_out[r] = 0;
5598 reload_reg_rtx[r] = 0;
5599 reload_optional[r] = 1;
5600 reload_secondary_p[r] = 1;
5601
5602 return 1;
5603}
5604\f
5605/* Assign hard reg targets for the pseudo-registers we must reload
5606 into hard regs for this insn.
5607 Also output the instructions to copy them in and out of the hard regs.
5608
5609 For machines with register classes, we are responsible for
5610 finding a reload reg in the proper class. */
5611
5612static void
03acd8f8 5613choose_reload_regs (chain)
7609e720 5614 struct insn_chain *chain;
32131a9c 5615{
7609e720 5616 rtx insn = chain->insn;
32131a9c
RK
5617 register int i, j;
5618 int max_group_size = 1;
5619 enum reg_class group_class = NO_REGS;
5620 int inheritance;
cb2afeb3 5621 int pass;
32131a9c
RK
5622
5623 rtx save_reload_reg_rtx[MAX_RELOADS];
5624 char save_reload_inherited[MAX_RELOADS];
5625 rtx save_reload_inheritance_insn[MAX_RELOADS];
5626 rtx save_reload_override_in[MAX_RELOADS];
5627 int save_reload_spill_index[MAX_RELOADS];
5628 HARD_REG_SET save_reload_reg_used;
546b63fb 5629 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
47c8cf91 5630 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
546b63fb 5631 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
47c8cf91 5632 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
546b63fb
RK
5633 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5634 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 5635 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 5636 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
5637 HARD_REG_SET save_reload_reg_used_in_insn;
5638 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
5639 HARD_REG_SET save_reload_reg_used_at_all;
5640
5641 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
5642 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5643 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
5644
5645 CLEAR_HARD_REG_SET (reload_reg_used);
5646 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 5647 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 5648 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
5649 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5650 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 5651
f1db3576
JL
5652 CLEAR_HARD_REG_SET (reg_used_in_insn);
5653 {
5654 HARD_REG_SET tmp;
5655 REG_SET_TO_HARD_REG_SET (tmp, chain->live_before);
5656 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5657 REG_SET_TO_HARD_REG_SET (tmp, chain->live_after);
5658 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5659 compute_use_by_pseudos (&reg_used_in_insn, chain->live_before);
5660 compute_use_by_pseudos (&reg_used_in_insn, chain->live_after);
5661 }
546b63fb
RK
5662 for (i = 0; i < reload_n_operands; i++)
5663 {
5664 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5665 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5666 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
47c8cf91 5667 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
546b63fb 5668 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
47c8cf91 5669 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
546b63fb 5670 }
32131a9c 5671
03acd8f8 5672 IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);
05d10675 5673
32131a9c
RK
5674#if 0 /* Not needed, now that we can always retry without inheritance. */
5675 /* See if we have more mandatory reloads than spill regs.
5676 If so, then we cannot risk optimizations that could prevent
a8fdc208 5677 reloads from sharing one spill register.
32131a9c
RK
5678
5679 Since we will try finding a better register than reload_reg_rtx
5680 unless it is equal to reload_in or reload_out, count such reloads. */
5681
5682 {
03acd8f8 5683 int tem = 0;
32131a9c
RK
5684 for (j = 0; j < n_reloads; j++)
5685 if (! reload_optional[j]
5686 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5687 && (reload_reg_rtx[j] == 0
5688 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5689 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5690 tem++;
5691 if (tem > n_spills)
5692 must_reuse = 1;
5693 }
5694#endif
5695
32131a9c
RK
5696 /* In order to be certain of getting the registers we need,
5697 we must sort the reloads into order of increasing register class.
5698 Then our grabbing of reload registers will parallel the process
a8fdc208 5699 that provided the reload registers.
32131a9c
RK
5700
5701 Also note whether any of the reloads wants a consecutive group of regs.
5702 If so, record the maximum size of the group desired and what
5703 register class contains all the groups needed by this insn. */
5704
5705 for (j = 0; j < n_reloads; j++)
5706 {
5707 reload_order[j] = j;
5708 reload_spill_index[j] = -1;
5709
5710 reload_mode[j]
05d10675 5711 = ((reload_inmode[j] == VOIDmode
546b63fb
RK
5712 || (GET_MODE_SIZE (reload_outmode[j])
5713 > GET_MODE_SIZE (reload_inmode[j])))
05d10675 5714 ? reload_outmode[j] : reload_inmode[j]);
32131a9c
RK
5715
5716 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5717
5718 if (reload_nregs[j] > 1)
5719 {
5720 max_group_size = MAX (reload_nregs[j], max_group_size);
5721 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5722 }
5723
5724 /* If we have already decided to use a certain register,
5725 don't use it in another way. */
5726 if (reload_reg_rtx[j])
546b63fb 5727 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
5728 reload_when_needed[j], reload_mode[j]);
5729 }
5730
5731 if (n_reloads > 1)
5732 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5733
4c9a05bc
RK
5734 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5735 sizeof reload_reg_rtx);
32131a9c 5736 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5737 bcopy ((char *) reload_inheritance_insn,
5738 (char *) save_reload_inheritance_insn,
32131a9c 5739 sizeof reload_inheritance_insn);
4c9a05bc 5740 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5741 sizeof reload_override_in);
4c9a05bc 5742 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5743 sizeof reload_spill_index);
5744 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5745 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5746 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5747 reload_reg_used_in_op_addr);
893bc853
RK
5748
5749 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5750 reload_reg_used_in_op_addr_reload);
5751
546b63fb
RK
5752 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5753 reload_reg_used_in_insn);
5754 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5755 reload_reg_used_in_other_addr);
5756
5757 for (i = 0; i < reload_n_operands; i++)
5758 {
5759 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5760 reload_reg_used_in_output[i]);
5761 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5762 reload_reg_used_in_input[i]);
5763 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5764 reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
5765 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5766 reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
5767 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5768 reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
5769 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5770 reload_reg_used_in_outaddr_addr[i]);
546b63fb 5771 }
32131a9c 5772
58b1581b
RS
5773 /* If -O, try first with inheritance, then turning it off.
5774 If not -O, don't do inheritance.
5775 Using inheritance when not optimizing leads to paradoxes
5776 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5777 because one side of the comparison might be inherited. */
32131a9c 5778
58b1581b 5779 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5780 {
5781 /* Process the reloads in order of preference just found.
5782 Beyond this point, subregs can be found in reload_reg_rtx.
5783
5784 This used to look for an existing reloaded home for all
5785 of the reloads, and only then perform any new reloads.
5786 But that could lose if the reloads were done out of reg-class order
5787 because a later reload with a looser constraint might have an old
5788 home in a register needed by an earlier reload with a tighter constraint.
5789
5790 To solve this, we make two passes over the reloads, in the order
5791 described above. In the first pass we try to inherit a reload
5792 from a previous insn. If there is a later reload that needs a
5793 class that is a proper subset of the class being processed, we must
5794 also allocate a spill register during the first pass.
5795
5796 Then make a second pass over the reloads to allocate any reloads
5797 that haven't been given registers yet. */
5798
be7ae2a4
RK
5799 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5800
32131a9c
RK
5801 for (j = 0; j < n_reloads; j++)
5802 {
5803 register int r = reload_order[j];
8593b745 5804 rtx search_equiv = NULL_RTX;
32131a9c
RK
5805
5806 /* Ignore reloads that got marked inoperative. */
b080c137
RK
5807 if (reload_out[r] == 0 && reload_in[r] == 0
5808 && ! reload_secondary_p[r])
32131a9c
RK
5809 continue;
5810
b29514ee 5811 /* If find_reloads chose to use reload_in or reload_out as a reload
b080c137
RK
5812	 register, we don't need to choose one.  Otherwise, try even if it
5813 found one since we might save an insn if we find the value lying
b29514ee
R
5814 around.
5815 Try also when reload_in is a pseudo without a hard reg. */
32131a9c
RK
5816 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5817 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
b29514ee
R
5818 || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
5819 && GET_CODE (reload_in[r]) != MEM
5820 && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
32131a9c
RK
5821 continue;
5822
5823#if 0 /* No longer needed for correct operation.
5824 It might give better code, or might not; worth an experiment? */
5825 /* If this is an optional reload, we can't inherit from earlier insns
5826 until we are sure that any non-optional reloads have been allocated.
5827 The following code takes advantage of the fact that optional reloads
5828 are at the end of reload_order. */
5829 if (reload_optional[r] != 0)
5830 for (i = 0; i < j; i++)
5831 if ((reload_out[reload_order[i]] != 0
5832 || reload_in[reload_order[i]] != 0
5833 || reload_secondary_p[reload_order[i]])
5834 && ! reload_optional[reload_order[i]]
5835 && reload_reg_rtx[reload_order[i]] == 0)
7609e720 5836 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
32131a9c
RK
5837#endif
5838
5839 /* First see if this pseudo is already available as reloaded
5840 for a previous insn. We cannot try to inherit for reloads
5841 that are smaller than the maximum number of registers needed
5842 for groups unless the register we would allocate cannot be used
5843 for the groups.
5844
5845 We could check here to see if this is a secondary reload for
5846 an object that is already in a register of the desired class.
5847 This would avoid the need for the secondary reload register.
5848 But this is complex because we can't easily determine what
b080c137
RK
5849 objects might want to be loaded via this reload. So let a
5850 register be allocated here. In `emit_reload_insns' we suppress
5851 one of the loads in the case described above. */
32131a9c
RK
5852
5853 if (inheritance)
5854 {
cb2afeb3 5855 int word = 0;
32131a9c 5856 register int regno = -1;
6a651371 5857 enum machine_mode mode = VOIDmode;
32131a9c
RK
5858
5859 if (reload_in[r] == 0)
5860 ;
5861 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5862 {
5863 regno = REGNO (reload_in[r]);
5864 mode = GET_MODE (reload_in[r]);
5865 }
32131a9c 5866 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5867 {
5868 regno = REGNO (reload_in_reg[r]);
5869 mode = GET_MODE (reload_in_reg[r]);
5870 }
cb2afeb3
R
5871 else if (GET_CODE (reload_in_reg[r]) == SUBREG
5872 && GET_CODE (SUBREG_REG (reload_in_reg[r])) == REG)
b60a8416 5873 {
cb2afeb3
R
5874 word = SUBREG_WORD (reload_in_reg[r]);
5875 regno = REGNO (SUBREG_REG (reload_in_reg[r]));
5876 if (regno < FIRST_PSEUDO_REGISTER)
5877 regno += word;
5878 mode = GET_MODE (reload_in_reg[r]);
5879 }
5880#ifdef AUTO_INC_DEC
5881 else if ((GET_CODE (reload_in_reg[r]) == PRE_INC
5882 || GET_CODE (reload_in_reg[r]) == PRE_DEC
5883 || GET_CODE (reload_in_reg[r]) == POST_INC
5884 || GET_CODE (reload_in_reg[r]) == POST_DEC)
5885 && GET_CODE (XEXP (reload_in_reg[r], 0)) == REG)
5886 {
5887 regno = REGNO (XEXP (reload_in_reg[r], 0));
5888 mode = GET_MODE (XEXP (reload_in_reg[r], 0));
5889 reload_out[r] = reload_in[r];
b60a8416 5890 }
cb2afeb3 5891#endif
32131a9c
RK
5892#if 0
5893 /* This won't work, since REGNO can be a pseudo reg number.
5894 Also, it takes much more hair to keep track of all the things
5895 that can invalidate an inherited reload of part of a pseudoreg. */
5896 else if (GET_CODE (reload_in[r]) == SUBREG
5897 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5898 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5899#endif
5900
5901 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5902 {
cb2afeb3
R
5903 enum reg_class class = reload_reg_class[r], last_class;
5904 rtx last_reg = reg_last_reload_reg[regno];
05d10675 5905
cb2afeb3
R
5906 i = REGNO (last_reg) + word;
5907 last_class = REGNO_REG_CLASS (i);
5908 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5909 >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
5910 && reg_reloaded_contents[i] == regno
e6e52be0 5911 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
e6e52be0 5912 && HARD_REGNO_MODE_OK (i, reload_mode[r])
cb2afeb3
R
5913 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5914 /* Even if we can't use this register as a reload
5915 register, we might use it for reload_override_in,
5916 if copying it to the desired class is cheap
5917 enough. */
5918 || ((REGISTER_MOVE_COST (last_class, class)
5919 < MEMORY_MOVE_COST (mode, class, 1))
5920#ifdef SECONDARY_INPUT_RELOAD_CLASS
5921 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5922 last_reg)
5923 == NO_REGS)
5924#endif
5925#ifdef SECONDARY_MEMORY_NEEDED
5926 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5927 mode)
5928#endif
5929 ))
5930
32131a9c
RK
5931 && (reload_nregs[r] == max_group_size
5932 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
e6e52be0 5933 i))
dfe96118
R
5934 && reload_reg_free_for_value_p (i, reload_opnum[r],
5935 reload_when_needed[r],
5936 reload_in[r],
5937 const0_rtx, r, 1))
32131a9c
RK
5938 {
5939 /* If a group is needed, verify that all the subsequent
0f41302f 5940 registers still have their values intact. */
32131a9c 5941 int nr
e6e52be0 5942 = HARD_REGNO_NREGS (i, reload_mode[r]);
32131a9c
RK
5943 int k;
5944
5945 for (k = 1; k < nr; k++)
e6e52be0
R
5946 if (reg_reloaded_contents[i + k] != regno
5947 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
32131a9c
RK
5948 break;
5949
5950 if (k == nr)
5951 {
c74fa651
RS
5952 int i1;
5953
cb2afeb3
R
5954 last_reg = (GET_MODE (last_reg) == mode
5955 ? last_reg : gen_rtx_REG (mode, i));
5956
c74fa651
RS
5957 /* We found a register that contains the
5958 value we need. If this register is the
5959 same as an `earlyclobber' operand of the
5960 current insn, just mark it as a place to
5961 reload from since we can't use it as the
5962 reload register itself. */
5963
5964 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5965 if (reg_overlap_mentioned_for_reload_p
5966 (reg_last_reload_reg[regno],
5967 reload_earlyclobbers[i1]))
5968 break;
5969
8908158d 5970 if (i1 != n_earlyclobbers
dfe96118
R
5971 || ! (reload_reg_free_for_value_p
5972 (i, reload_opnum[r], reload_when_needed[r],
5973 reload_in[r], reload_out[r], r, 1))
e6e52be0 5974 /* Don't use it if we'd clobber a pseudo reg. */
f1db3576 5975 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
e6e52be0
R
5976 && reload_out[r]
5977 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
0c7f2259
R
5978 /* Don't clobber the frame pointer. */
5979 || (i == HARD_FRAME_POINTER_REGNUM
5980 && reload_out[r])
8908158d
RS
5981 /* Don't really use the inherited spill reg
5982 if we need it wider than we've got it. */
5983 || (GET_MODE_SIZE (reload_mode[r])
b29514ee 5984 > GET_MODE_SIZE (mode))
cb2afeb3
R
5985 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5986 i)
5987
b29514ee
R
5988 /* If find_reloads chose reload_out as reload
5989 register, stay with it - that leaves the
5990 inherited register for subsequent reloads. */
297927a8 5991 || (reload_out[r] && reload_reg_rtx[r]
b29514ee
R
5992 && rtx_equal_p (reload_out[r],
5993 reload_reg_rtx[r])))
cb2afeb3
R
5994 {
5995 reload_override_in[r] = last_reg;
5996 reload_inheritance_insn[r]
5997 = reg_reloaded_insn[i];
5998 }
c74fa651
RS
5999 else
6000 {
54c40e68 6001 int k;
c74fa651
RS
6002 /* We can use this as a reload reg. */
6003 /* Mark the register as in use for this part of
6004 the insn. */
e6e52be0 6005 mark_reload_reg_in_use (i,
c74fa651
RS
6006 reload_opnum[r],
6007 reload_when_needed[r],
6008 reload_mode[r]);
cb2afeb3 6009 reload_reg_rtx[r] = last_reg;
c74fa651
RS
6010 reload_inherited[r] = 1;
6011 reload_inheritance_insn[r]
6012 = reg_reloaded_insn[i];
6013 reload_spill_index[r] = i;
54c40e68
RS
6014 for (k = 0; k < nr; k++)
6015 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
e6e52be0 6016 i + k);
c74fa651 6017 }
32131a9c
RK
6018 }
6019 }
6020 }
6021 }
6022
6023 /* Here's another way to see if the value is already lying around. */
6024 if (inheritance
6025 && reload_in[r] != 0
6026 && ! reload_inherited[r]
6027 && reload_out[r] == 0
6028 && (CONSTANT_P (reload_in[r])
6029 || GET_CODE (reload_in[r]) == PLUS
6030 || GET_CODE (reload_in[r]) == REG
6031 || GET_CODE (reload_in[r]) == MEM)
6032 && (reload_nregs[r] == max_group_size
6033 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
8593b745
R
6034 search_equiv = reload_in[r];
6035 /* If this is an output reload from a simple move insn, see
6036 if an equivalence for the input is available. */
6037 else if (inheritance && reload_in[r] == 0 && reload_out[r] != 0)
6038 {
6039 rtx set = single_set (insn);
6040
6041 if (set
6042 && rtx_equal_p (reload_out[r], SET_DEST (set))
6043 && CONSTANT_P (SET_SRC (set)))
6044 search_equiv = SET_SRC (set);
6045 }
6046
6047 if (search_equiv)
32131a9c
RK
6048 {
6049 register rtx equiv
8593b745 6050 = find_equiv_reg (search_equiv, insn, reload_reg_class[r],
fb3821f7 6051 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
6052 int regno;
6053
6054 if (equiv != 0)
6055 {
6056 if (GET_CODE (equiv) == REG)
6057 regno = REGNO (equiv);
6058 else if (GET_CODE (equiv) == SUBREG)
6059 {
f8a9e02b
RK
6060 /* This must be a SUBREG of a hard register.
6061 Make a new REG since this might be used in an
6062 address and not all machines support SUBREGs
6063 there. */
6064 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
38a448ca 6065 equiv = gen_rtx_REG (reload_mode[r], regno);
32131a9c
RK
6066 }
6067 else
6068 abort ();
6069 }
6070
6071 /* If we found a spill reg, reject it unless it is free
6072 and of the desired class. */
6073 if (equiv != 0
cb2afeb3
R
6074 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
6075 && ! reload_reg_free_for_value_p (regno, reload_opnum[r],
6076 reload_when_needed[r],
6077 reload_in[r],
dfe96118 6078 reload_out[r], r, 1))
32131a9c
RK
6079 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
6080 regno)))
6081 equiv = 0;
6082
32131a9c
RK
6083 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
6084 equiv = 0;
6085
6086 /* We found a register that contains the value we need.
6087 If this register is the same as an `earlyclobber' operand
6088 of the current insn, just mark it as a place to reload from
6089 since we can't use it as the reload register itself. */
6090
6091 if (equiv != 0)
6092 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
6093 if (reg_overlap_mentioned_for_reload_p (equiv,
6094 reload_earlyclobbers[i]))
32131a9c
RK
6095 {
6096 reload_override_in[r] = equiv;
6097 equiv = 0;
6098 break;
6099 }
6100
3c785e47
R
6101 /* If the equiv register we have found is explicitly clobbered
6102 in the current insn, it depends on the reload type whether we
6103 can use it, use it for reload_override_in, or not at all.
6104 In particular, we then can't use EQUIV for a
6105 RELOAD_FOR_OUTPUT_ADDRESS reload. */
32131a9c
RK
6106
6107 if (equiv != 0 && regno_clobbered_p (regno, insn))
6108 {
3c785e47
R
6109 switch (reload_when_needed[r])
6110 {
6111 case RELOAD_FOR_OTHER_ADDRESS:
6112 case RELOAD_FOR_INPADDR_ADDRESS:
6113 case RELOAD_FOR_INPUT_ADDRESS:
6114 case RELOAD_FOR_OPADDR_ADDR:
6115 break;
6116 case RELOAD_OTHER:
6117 case RELOAD_FOR_INPUT:
6118 case RELOAD_FOR_OPERAND_ADDRESS:
6119 reload_override_in[r] = equiv;
6120 /* Fall through. */
6121 default:
6122 equiv = 0;
6123 break;
6124 }
32131a9c
RK
6125 }
6126
6127 /* If we found an equivalent reg, say no code need be generated
6128 to load it, and use it as our reload reg. */
3ec2ea3e 6129 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c 6130 {
100338df
JL
6131 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
6132 int k;
32131a9c
RK
6133 reload_reg_rtx[r] = equiv;
6134 reload_inherited[r] = 1;
100338df 6135
91d7e7ac
R
6136 /* If reg_reloaded_valid is not set for this register,
6137 there might be a stale spill_reg_store lying around.
6138 We must clear it, since otherwise emit_reload_insns
6139 might delete the store. */
6140 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6141 spill_reg_store[regno] = NULL_RTX;
100338df
JL
6142 /* If any of the hard registers in EQUIV are spill
6143 registers, mark them as in use for this insn. */
6144 for (k = 0; k < nr; k++)
be7ae2a4 6145 {
100338df
JL
6146 i = spill_reg_order[regno + k];
6147 if (i >= 0)
6148 {
6149 mark_reload_reg_in_use (regno, reload_opnum[r],
6150 reload_when_needed[r],
6151 reload_mode[r]);
6152 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6153 regno + k);
6154 }
be7ae2a4 6155 }
32131a9c
RK
6156 }
6157 }
6158
6159 /* If we found a register to use already, or if this is an optional
6160 reload, we are done. */
6161 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
6162 continue;
6163
6164#if 0 /* No longer needed for correct operation. Might or might not
6165 give better code on the average. Want to experiment? */
6166
6167 /* See if there is a later reload that has a class different from our
6168 class that intersects our class or that requires fewer registers
6169 than our reload. If so, we must allocate a register to this
6170 reload now, since that reload might inherit a previous reload
6171 and take the only available register in our class. Don't do this
6172 for optional reloads since they will force all previous reloads
6173 to be allocated. Also don't do this for reloads that have been
6174 turned off. */
6175
6176 for (i = j + 1; i < n_reloads; i++)
6177 {
6178 int s = reload_order[i];
6179
d45cf215
RS
6180 if ((reload_in[s] == 0 && reload_out[s] == 0
6181 && ! reload_secondary_p[s])
32131a9c
RK
6182 || reload_optional[s])
6183 continue;
6184
6185 if ((reload_reg_class[s] != reload_reg_class[r]
6186 && reg_classes_intersect_p (reload_reg_class[r],
6187 reload_reg_class[s]))
6188 || reload_nregs[s] < reload_nregs[r])
05d10675 6189 break;
32131a9c
RK
6190 }
6191
6192 if (i == n_reloads)
6193 continue;
6194
7609e720 6195 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
32131a9c
RK
6196#endif
6197 }
6198
6199 /* Now allocate reload registers for anything non-optional that
6200 didn't get one yet. */
6201 for (j = 0; j < n_reloads; j++)
6202 {
6203 register int r = reload_order[j];
6204
6205 /* Ignore reloads that got marked inoperative. */
6206 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
6207 continue;
6208
6209 /* Skip reloads that already have a register allocated or are
0f41302f 6210 optional. */
32131a9c
RK
6211 if (reload_reg_rtx[r] != 0 || reload_optional[r])
6212 continue;
6213
7609e720 6214 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
32131a9c
RK
6215 break;
6216 }
6217
6218 /* If that loop got all the way, we have won. */
6219 if (j == n_reloads)
6220 break;
6221
32131a9c
RK
6222 /* Loop around and try without any inheritance. */
6223 /* First undo everything done by the failed attempt
6224 to allocate with inheritance. */
4c9a05bc
RK
6225 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
6226 sizeof reload_reg_rtx);
6227 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
6228 sizeof reload_inherited);
6229 bcopy ((char *) save_reload_inheritance_insn,
6230 (char *) reload_inheritance_insn,
32131a9c 6231 sizeof reload_inheritance_insn);
4c9a05bc 6232 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 6233 sizeof reload_override_in);
4c9a05bc 6234 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
6235 sizeof reload_spill_index);
6236 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
6237 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
6238 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
6239 save_reload_reg_used_in_op_addr);
893bc853
RK
6240 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
6241 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
6242 COPY_HARD_REG_SET (reload_reg_used_in_insn,
6243 save_reload_reg_used_in_insn);
6244 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
6245 save_reload_reg_used_in_other_addr);
6246
6247 for (i = 0; i < reload_n_operands; i++)
6248 {
6249 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
6250 save_reload_reg_used_in_input[i]);
6251 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
6252 save_reload_reg_used_in_output[i]);
6253 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
6254 save_reload_reg_used_in_input_addr[i]);
47c8cf91
ILT
6255 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
6256 save_reload_reg_used_in_inpaddr_addr[i]);
546b63fb
RK
6257 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
6258 save_reload_reg_used_in_output_addr[i]);
47c8cf91
ILT
6259 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
6260 save_reload_reg_used_in_outaddr_addr[i]);
546b63fb 6261 }
32131a9c
RK
6262 }
6263
6264 /* If we thought we could inherit a reload, because it seemed that
6265 nothing else wanted the same reload register earlier in the insn,
cb2afeb3
R
6266 verify that assumption, now that all reloads have been assigned.
6267 Likewise for reloads where reload_override_in has been set. */
32131a9c 6268
cb2afeb3
R
6269 /* If doing expensive optimizations, do one preliminary pass that doesn't
6270 cancel any inheritance, but removes reloads that have been needed only
6271 for reloads that we know can be inherited. */
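 /* Illustrative note on the loop bound: PASS takes the values 1 and then 0
    when flag_expensive_optimizations is set, and only 0 otherwise, so the
    preliminary pass described above is simply skipped at lower optimization
    levels; that pass never cancels an inheritance, it only removes reloads
    made unnecessary by one.  */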
6272 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
32131a9c 6273 {
cb2afeb3 6274 for (j = 0; j < n_reloads; j++)
029b38ff 6275 {
cb2afeb3
R
6276 register int r = reload_order[j];
6277 rtx check_reg;
cb2afeb3
R
6278 if (reload_inherited[r] && reload_reg_rtx[r])
6279 check_reg = reload_reg_rtx[r];
6280 else if (reload_override_in[r]
6281 && (GET_CODE (reload_override_in[r]) == REG
05d10675 6282 || GET_CODE (reload_override_in[r]) == SUBREG))
cb2afeb3
R
6283 check_reg = reload_override_in[r];
6284 else
6285 continue;
dfe96118 6286 if (! reload_reg_free_for_value_p (true_regnum (check_reg),
05d10675
BS
6287 reload_opnum[r],
6288 reload_when_needed[r],
6289 reload_in[r],
6290 (reload_inherited[r]
dfe96118
R
6291 ? reload_out[r] : const0_rtx),
6292 r, 1))
029b38ff 6293 {
cb2afeb3
R
6294 if (pass)
6295 continue;
6296 reload_inherited[r] = 0;
6297 reload_override_in[r] = 0;
029b38ff 6298 }
cb2afeb3
R
6299 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6300 reload_override_in, then we do not need its related
6301 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6302 likewise for other reload types.
6303 We handle this by removing a reload when its only replacement
6304 is mentioned in reload_in of the reload we are going to inherit.
6305 Special cases are auto_inc expressions; even if the input is
6306 inherited, we still need the address for the output. We can
fe92fe26 6307 recognize them because they have RELOAD_OUT set to RELOAD_IN.
cb2afeb3
R
6308 If we succeeded in removing some reload and we are doing a preliminary
6309 pass just to remove such reloads, make another pass, since the
6310 removal of one reload might allow us to inherit another one. */
fe92fe26
R
6311 else if (reload_in[r]
6312 && reload_out[r] != reload_in[r]
cb2afeb3
R
6313 && remove_address_replacements (reload_in[r]) && pass)
6314 pass = 2;
32131a9c
RK
6315 }
6316 }
6317
6318 /* Now that reload_override_in is known valid,
6319 actually override reload_in. */
6320 for (j = 0; j < n_reloads; j++)
6321 if (reload_override_in[j])
6322 reload_in[j] = reload_override_in[j];
6323
6324 /* If this reload won't be done because it has been cancelled or is
6325 optional and not inherited, clear reload_reg_rtx so other
6326 routines (such as subst_reloads) don't get confused. */
6327 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
6328 if (reload_reg_rtx[j] != 0
6329 && ((reload_optional[j] && ! reload_inherited[j])
6330 || (reload_in[j] == 0 && reload_out[j] == 0
6331 && ! reload_secondary_p[j])))
6332 {
6333 int regno = true_regnum (reload_reg_rtx[j]);
6334
6335 if (spill_reg_order[regno] >= 0)
6336 clear_reload_reg_in_use (regno, reload_opnum[j],
6337 reload_when_needed[j], reload_mode[j]);
6338 reload_reg_rtx[j] = 0;
6339 }
32131a9c
RK
6340
6341 /* Record which pseudos and which spill regs have output reloads. */
6342 for (j = 0; j < n_reloads; j++)
6343 {
6344 register int r = reload_order[j];
6345
6346 i = reload_spill_index[r];
6347
e6e52be0 6348 /* I is nonneg if this reload uses a register.
32131a9c
RK
6349 If reload_reg_rtx[r] is 0, this is an optional reload
6350 that we opted to ignore. */
cb2afeb3 6351 if (reload_out_reg[r] != 0 && GET_CODE (reload_out_reg[r]) == REG
32131a9c
RK
6352 && reload_reg_rtx[r] != 0)
6353 {
cb2afeb3 6354 register int nregno = REGNO (reload_out_reg[r]);
372e033b
RS
6355 int nr = 1;
6356
6357 if (nregno < FIRST_PSEUDO_REGISTER)
6358 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
6359
6360 while (--nr >= 0)
372e033b
RS
6361 reg_has_output_reload[nregno + nr] = 1;
6362
6363 if (i >= 0)
32131a9c 6364 {
e6e52be0 6365 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
372e033b 6366 while (--nr >= 0)
e6e52be0 6367 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
32131a9c
RK
6368 }
6369
6370 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
6371 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6372 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
6373 abort ();
6374 }
6375 }
6376}
cb2afeb3
R
6377
6378/* Deallocate the reload register for reload R. This is called from
6379 remove_address_replacements. */
6380void
6381deallocate_reload_reg (r)
6382 int r;
6383{
6384 int regno;
6385
6386 if (! reload_reg_rtx[r])
6387 return;
6388 regno = true_regnum (reload_reg_rtx[r]);
6389 reload_reg_rtx[r] = 0;
6390 if (spill_reg_order[regno] >= 0)
6391 clear_reload_reg_in_use (regno, reload_opnum[r], reload_when_needed[r],
6392 reload_mode[r]);
6393 reload_spill_index[r] = -1;
6394}
32131a9c 6395\f
e9a25f70 6396/* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
546b63fb
RK
6397 reloads of the same item for fear that we might not have enough reload
6398 registers. However, normally they will get the same reload register
05d10675 6399 and hence actually need not be loaded twice.
546b63fb
RK
6400
6401 Here we check for the most common case of this phenomenon: when we have
6402 a number of reloads for the same object, each of which was allocated
6403 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6404 reload, and is not modified in the insn itself. If we find such,
6405 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6406 This will not increase the number of spill registers needed and will
6407 prevent redundant code. */
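/* A sketch of the situation handled here, with hypothetical pseudo numbers:
   in an insn such as
	(set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 101)))
   the two uses of pseudo 101 may have been given two separate
   RELOAD_FOR_INPUT reloads when SMALL_REGISTER_CLASSES is non-zero.  If
   both were assigned the same reload register, and that register is used
   by no other reload and is not modified by the insn, the second reload
   is redundant; the loop below cancels it and turns the surviving reload
   into RELOAD_OTHER.  */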
6408
546b63fb
RK
6409static void
6410merge_assigned_reloads (insn)
6411 rtx insn;
6412{
6413 int i, j;
6414
6415 /* Scan all the reloads looking for ones that only load values and
6416 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6417 assigned and not modified by INSN. */
6418
6419 for (i = 0; i < n_reloads; i++)
6420 {
d668e863
R
6421 int conflicting_input = 0;
6422 int max_input_address_opnum = -1;
6423 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6424
546b63fb
RK
6425 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6426 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6427 || reg_set_p (reload_reg_rtx[i], insn))
6428 continue;
6429
6430 /* Look at all other reloads. Ensure that the only use of this
6431 reload_reg_rtx is in a reload that just loads the same value
6432 as we do. Note that any secondary reloads must be of the identical
6433 class since the values, modes, and result registers are the
6434 same, so we need not do anything with any secondary reloads. */
6435
6436 for (j = 0; j < n_reloads; j++)
6437 {
6438 if (i == j || reload_reg_rtx[j] == 0
6439 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6440 reload_reg_rtx[i]))
6441 continue;
6442
d668e863
R
6443 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6444 && reload_opnum[j] > max_input_address_opnum)
6445 max_input_address_opnum = reload_opnum[j];
6446
546b63fb 6447 /* If the reload regs aren't exactly the same (e.g., different modes)
d668e863
R
6448 or if the values are different, we can't merge this reload.
6449 But if it is an input reload, we might still merge
6450 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
546b63fb
RK
6451
6452 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6453 || reload_out[j] != 0 || reload_in[j] == 0
6454 || ! rtx_equal_p (reload_in[i], reload_in[j]))
d668e863
R
6455 {
6456 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6457 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6458 || reload_opnum[i] > reload_opnum[j])
6459 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6460 break;
6461 conflicting_input = 1;
6462 if (min_conflicting_input_opnum > reload_opnum[j])
6463 min_conflicting_input_opnum = reload_opnum[j];
6464 }
546b63fb
RK
6465 }
6466
6467 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6468 we, in fact, found any matching reloads. */
6469
d668e863
R
6470 if (j == n_reloads
6471 && max_input_address_opnum <= min_conflicting_input_opnum)
546b63fb
RK
6472 {
6473 for (j = 0; j < n_reloads; j++)
6474 if (i != j && reload_reg_rtx[j] != 0
d668e863
R
6475 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6476 && (! conflicting_input
6477 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6478 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
546b63fb
RK
6479 {
6480 reload_when_needed[i] = RELOAD_OTHER;
6481 reload_in[j] = 0;
efdb3590 6482 reload_spill_index[j] = -1;
546b63fb
RK
6483 transfer_replacements (i, j);
6484 }
6485
6486 /* If this is now RELOAD_OTHER, look for any reloads that load
6487 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6488 if they were for inputs, RELOAD_OTHER for outputs. Note that
6489 this test is equivalent to looking for reloads for this operand
6490 number. */
6491
6492 if (reload_when_needed[i] == RELOAD_OTHER)
6493 for (j = 0; j < n_reloads; j++)
6494 if (reload_in[j] != 0
6495 && reload_when_needed[i] != RELOAD_OTHER
6496 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6497 reload_in[i]))
6498 reload_when_needed[j]
47c8cf91
ILT
6499 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6500 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6501 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
546b63fb
RK
6502 }
6503 }
05d10675 6504}
e9a25f70 6505
546b63fb 6506\f
32131a9c
RK
6507/* Output insns to reload values in and out of the chosen reload regs. */
6508
6509static void
7609e720
BS
6510emit_reload_insns (chain)
6511 struct insn_chain *chain;
32131a9c 6512{
7609e720
BS
6513 rtx insn = chain->insn;
6514
32131a9c 6515 register int j;
546b63fb
RK
6516 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6517 rtx other_input_address_reload_insns = 0;
6518 rtx other_input_reload_insns = 0;
6519 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6520 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb
RK
6521 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6522 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
47c8cf91 6523 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
546b63fb 6524 rtx operand_reload_insns = 0;
893bc853 6525 rtx other_operand_reload_insns = 0;
befa01b9 6526 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
32131a9c 6527 rtx following_insn = NEXT_INSN (insn);
c93b03c2 6528 rtx before_insn = PREV_INSN (insn);
32131a9c
RK
6529 int special;
6530 /* Values to be put in spill_reg_store are put here first. */
6531 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
e6e52be0
R
6532 HARD_REG_SET reg_reloaded_died;
6533
6534 CLEAR_HARD_REG_SET (reg_reloaded_died);
32131a9c 6535
546b63fb
RK
6536 for (j = 0; j < reload_n_operands; j++)
6537 input_reload_insns[j] = input_address_reload_insns[j]
47c8cf91 6538 = inpaddr_address_reload_insns[j]
befa01b9 6539 = output_reload_insns[j] = output_address_reload_insns[j]
47c8cf91 6540 = outaddr_address_reload_insns[j]
befa01b9 6541 = other_output_reload_insns[j] = 0;
546b63fb 6542
32131a9c
RK
6543 /* Now output the instructions to copy the data into and out of the
6544 reload registers. Do these in the order that the reloads were reported,
6545 since reloads of base and index registers precede reloads of operands
6546 and the operands may need the base and index registers reloaded. */
6547
6548 for (j = 0; j < n_reloads; j++)
6549 {
6550 register rtx old;
6551 rtx oldequiv_reg = 0;
80d92002 6552 rtx this_reload_insn = 0;
b60a8416 6553 int expect_occurrences = 1;
73b2ad9e 6554
cb2afeb3
R
6555 if (reload_reg_rtx[j]
6556 && REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
6557 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = 0;
32131a9c 6558
cb2afeb3
R
6559 old = (reload_in[j] && GET_CODE (reload_in[j]) == MEM
6560 ? reload_in_reg[j] : reload_in[j]);
6561
6562 if (old != 0
6563 /* AUTO_INC reloads need to be handled even if inherited. We got an
6564 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6565 && (! reload_inherited[j] || (reload_out[j] && ! reload_out_reg[j]))
32131a9c
RK
6566 && ! rtx_equal_p (reload_reg_rtx[j], old)
6567 && reload_reg_rtx[j] != 0)
6568 {
6569 register rtx reloadreg = reload_reg_rtx[j];
6570 rtx oldequiv = 0;
6571 enum machine_mode mode;
546b63fb 6572 rtx *where;
32131a9c
RK
6573
6574 /* Determine the mode to reload in.
6575 This is very tricky because we have three to choose from.
6576 There is the mode the insn operand wants (reload_inmode[J]).
6577 There is the mode of the reload register RELOADREG.
6578 There is the intrinsic mode of the operand, which we could find
6579 by stripping some SUBREGs.
6580 It turns out that RELOADREG's mode is irrelevant:
6581 we can change that arbitrarily.
6582
6583 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6584 then the reload reg may not support QImode moves, so use SImode.
6585 If foo is in memory due to spilling a pseudo reg, this is safe,
6586 because the QImode value is in the least significant part of a
6587 slot big enough for a SImode. If foo is some other sort of
6588 memory reference, then it is impossible to reload this case,
6589 so previous passes had better make sure this never happens.
6590
6591 Then consider a one-word union which has SImode and one of its
6592 members is a float, being fetched as (SUBREG:SF union:SI).
6593 We must fetch that as SFmode because we could be loading into
6594 a float-only register. In this case OLD's mode is correct.
6595
6596 Consider an immediate integer: it has VOIDmode. Here we need
6597 to get a mode from something else.
6598
6599 In some cases, there is a fourth mode, the operand's
6600 containing mode. If the insn specifies a containing mode for
6601 this operand, it overrides all others.
6602
6603 I am not sure whether the algorithm here is always right,
6604 but it does the right things in those cases. */
6605
6606 mode = GET_MODE (old);
6607 if (mode == VOIDmode)
6608 mode = reload_inmode[j];
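 /* Illustration of the choice just made, with hypothetical RTL: an operand
    written as (subreg:SI (reg:QI 70) 0) is fetched in SImode, relying on
    the spill slot of pseudo 70 being wide enough for an SImode access;
    (subreg:SF (reg:SI 71) 0) for a one-word union is fetched in SFmode,
    OLD's own mode; and for a VOIDmode constant such as (const_int 4) the
    mode falls back to reload_inmode[j], as just done above.  */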
32131a9c
RK
6609
6610#ifdef SECONDARY_INPUT_RELOAD_CLASS
6611 /* If we need a secondary register for this operation, see if
6612 the value is already in a register in that class. Don't
6613 do this if the secondary register will be used as a scratch
6614 register. */
6615
b80bba27
RK
6616 if (reload_secondary_in_reload[j] >= 0
6617 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 6618 && optimize)
32131a9c
RK
6619 oldequiv
6620 = find_equiv_reg (old, insn,
b80bba27 6621 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 6622 -1, NULL_PTR, 0, mode);
32131a9c
RK
6623#endif
6624
6625 /* If reloading from memory, see if there is a register
6626 that already holds the same value. If so, reload from there.
6627 We can pass 0 as the reload_reg_p argument because
6628 any other reload has either already been emitted,
6629 in which case find_equiv_reg will see the reload-insn,
6630 or has yet to be emitted, in which case it doesn't matter
6631 because we will use this equiv reg right away. */
6632
58b1581b 6633 if (oldequiv == 0 && optimize
32131a9c
RK
6634 && (GET_CODE (old) == MEM
6635 || (GET_CODE (old) == REG
6636 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6637 && reg_renumber[REGNO (old)] < 0)))
546b63fb 6638 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 6639 -1, NULL_PTR, 0, mode);
32131a9c
RK
6640
6641 if (oldequiv)
6642 {
6643 int regno = true_regnum (oldequiv);
6644
dfe96118
R
6645 /* Don't use OLDEQUIV if any other reload changes it at an
6646 earlier stage of this insn or at this stage. */
6647 if (! reload_reg_free_for_value_p (regno, reload_opnum[j],
6648 reload_when_needed[j],
6649 reload_in[j], const0_rtx, j,
6650 0))
32131a9c
RK
6651 oldequiv = 0;
6652
546b63fb
RK
6653 /* If it is no cheaper to copy from OLDEQUIV into the
6654 reload register than it would be to move from memory,
6655 don't use it. Likewise, if we need a secondary register
6656 or memory. */
6657
6658 if (oldequiv != 0
6659 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6660 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6661 reload_reg_class[j])
370b1b83 6662 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
546b63fb
RK
6663#ifdef SECONDARY_INPUT_RELOAD_CLASS
6664 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6665 mode, oldequiv)
6666 != NO_REGS)
6667#endif
6668#ifdef SECONDARY_MEMORY_NEEDED
370b1b83
R
6669 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6670 reload_reg_class[j],
546b63fb
RK
6671 mode)
6672#endif
6673 ))
6674 oldequiv = 0;
32131a9c
RK
6675 }
6676
cb2afeb3
R
6677 /* delete_output_reload is only invoked properly if old contains
6678 the original pseudo register. Since this is replaced with a
6679 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6680 find the pseudo in RELOAD_IN_REG. */
6681 if (oldequiv == 0
6682 && reload_override_in[j]
6683 && GET_CODE (reload_in_reg[j]) == REG)
6684 {
6685 oldequiv = old;
6686 old = reload_in_reg[j];
6687 }
32131a9c
RK
6688 if (oldequiv == 0)
6689 oldequiv = old;
6690 else if (GET_CODE (oldequiv) == REG)
6691 oldequiv_reg = oldequiv;
6692 else if (GET_CODE (oldequiv) == SUBREG)
6693 oldequiv_reg = SUBREG_REG (oldequiv);
6694
76182796
RK
6695 /* If we are reloading from a register that was recently stored in
6696 with an output-reload, see if we can prove there was
6697 actually no need to store the old value in it. */
6698
6699 if (optimize && GET_CODE (oldequiv) == REG
6700 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
e6e52be0 6701 && spill_reg_store[REGNO (oldequiv)]
cb2afeb3
R
6702 && GET_CODE (old) == REG
6703 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6704 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6705 reload_out_reg[j])))
6706 delete_output_reload (insn, j, REGNO (oldequiv));
76182796 6707
32131a9c 6708 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
6709 then load RELOADREG from OLDEQUIV. Note that we cannot use
6710 gen_lowpart_common since it can do the wrong thing when
6711 RELOADREG has a multi-word mode. Note that RELOADREG
6712 must always be a REG here. */
32131a9c
RK
6713
6714 if (GET_MODE (reloadreg) != mode)
38a448ca 6715 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
6716 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6717 oldequiv = SUBREG_REG (oldequiv);
6718 if (GET_MODE (oldequiv) != VOIDmode
6719 && mode != GET_MODE (oldequiv))
38a448ca 6720 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
32131a9c 6721
546b63fb 6722 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
6723 switch (reload_when_needed[j])
6724 {
32131a9c 6725 case RELOAD_OTHER:
546b63fb
RK
6726 where = &other_input_reload_insns;
6727 break;
6728 case RELOAD_FOR_INPUT:
6729 where = &input_reload_insns[reload_opnum[j]];
32131a9c 6730 break;
546b63fb
RK
6731 case RELOAD_FOR_INPUT_ADDRESS:
6732 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 6733 break;
47c8cf91
ILT
6734 case RELOAD_FOR_INPADDR_ADDRESS:
6735 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6736 break;
546b63fb
RK
6737 case RELOAD_FOR_OUTPUT_ADDRESS:
6738 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c 6739 break;
47c8cf91
ILT
6740 case RELOAD_FOR_OUTADDR_ADDRESS:
6741 where = &outaddr_address_reload_insns[reload_opnum[j]];
6742 break;
32131a9c 6743 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
6744 where = &operand_reload_insns;
6745 break;
893bc853
RK
6746 case RELOAD_FOR_OPADDR_ADDR:
6747 where = &other_operand_reload_insns;
6748 break;
546b63fb
RK
6749 case RELOAD_FOR_OTHER_ADDRESS:
6750 where = &other_input_address_reload_insns;
6751 break;
6752 default:
6753 abort ();
32131a9c
RK
6754 }
6755
546b63fb 6756 push_to_sequence (*where);
32131a9c
RK
6757 special = 0;
6758
6759 /* Auto-increment addresses must be reloaded in a special way. */
cb2afeb3 6760 if (reload_out[j] && ! reload_out_reg[j])
32131a9c
RK
6761 {
6762 /* We are not going to bother supporting the case where an
6763 incremented register can't be copied directly from
6764 OLDEQUIV since this seems highly unlikely. */
b80bba27 6765 if (reload_secondary_in_reload[j] >= 0)
32131a9c 6766 abort ();
cb2afeb3
R
6767
6768 if (reload_inherited[j])
6769 oldequiv = reloadreg;
6770
6771 old = XEXP (reload_in_reg[j], 0);
6772
6773 if (optimize && GET_CODE (oldequiv) == REG
6774 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6775 && spill_reg_store[REGNO (oldequiv)]
6776 && GET_CODE (old) == REG
6777 && (dead_or_set_p (insn,
6778 spill_reg_stored_to[REGNO (oldequiv)])
6779 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6780 old)))
6781 delete_output_reload (insn, j, REGNO (oldequiv));
6782
32131a9c
RK
6783 /* Prevent normal processing of this reload. */
6784 special = 1;
6785 /* Output a special code sequence for this case. */
cb2afeb3
R
6786 new_spill_reg_store[REGNO (reloadreg)]
6787 = inc_for_reload (reloadreg, oldequiv, reload_out[j],
6788 reload_inc[j]);
32131a9c
RK
6789 }
6790
6791 /* If we are reloading a pseudo-register that was set by the previous
6792 insn, see if we can get rid of that pseudo-register entirely
6793 by redirecting the previous insn into our reload register. */
6794
6795 else if (optimize && GET_CODE (old) == REG
6796 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6797 && dead_or_set_p (insn, old)
6798 /* This is unsafe if some other reload
6799 uses the same reg first. */
dfe96118
R
6800 && reload_reg_free_for_value_p (REGNO (reloadreg),
6801 reload_opnum[j],
05d10675 6802 reload_when_needed[j],
dfe96118
R
6803 old, reload_out[j],
6804 j, 0))
32131a9c
RK
6805 {
6806 rtx temp = PREV_INSN (insn);
6807 while (temp && GET_CODE (temp) == NOTE)
6808 temp = PREV_INSN (temp);
6809 if (temp
6810 && GET_CODE (temp) == INSN
6811 && GET_CODE (PATTERN (temp)) == SET
6812 && SET_DEST (PATTERN (temp)) == old
6813 /* Make sure we can access insn_operand_constraint. */
6814 && asm_noperands (PATTERN (temp)) < 0
6815 /* This is unsafe if prev insn rejects our reload reg. */
6816 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6817 reloadreg)
6818 /* This is unsafe if operand occurs more than once in current
6819 insn. Perhaps some occurrences aren't reloaded. */
6820 && count_occurrences (PATTERN (insn), old) == 1
6821 /* Don't risk splitting a matching pair of operands. */
6822 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6823 {
6824 /* Store into the reload register instead of the pseudo. */
6825 SET_DEST (PATTERN (temp)) = reloadreg;
d30e8ef0
BS
6826
6827 /* If the previous insn is an output reload, the source is
6828 a reload register, and its spill_reg_store entry will
6829 contain the previous destination. This is now
6830 invalid. */
6831 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6832 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6833 {
6834 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6835 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6836 }
6837
32131a9c
RK
6838 /* If these are the only uses of the pseudo reg,
6839 pretend for GDB it lives in the reload reg we used. */
b1f21e0a
MM
6840 if (REG_N_DEATHS (REGNO (old)) == 1
6841 && REG_N_SETS (REGNO (old)) == 1)
32131a9c
RK
6842 {
6843 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6844 alter_reg (REGNO (old), -1);
6845 }
6846 special = 1;
6847 }
6848 }
6849
546b63fb
RK
6850 /* We can't do that, so output an insn to load RELOADREG. */
6851
32131a9c
RK
6852 if (! special)
6853 {
6854#ifdef SECONDARY_INPUT_RELOAD_CLASS
6855 rtx second_reload_reg = 0;
6856 enum insn_code icode;
6857
6858 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
6859 and icode, if any. If OLDEQUIV and OLD are different or
6860 if this is an in-out reload, recompute whether or not we
6861 still need a secondary register and what the icode should
6862 be. If we still need a secondary register and the class or
6863 icode is different, go back to reloading from OLD if using
6864 OLDEQUIV means that we got the wrong type of register. We
6865 cannot have different class or icode due to an in-out reload
6866 because we don't make such reloads when both the input and
6867 output need secondary reload registers. */
32131a9c 6868
b80bba27 6869 if (reload_secondary_in_reload[j] >= 0)
32131a9c 6870 {
b80bba27 6871 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
6872 rtx real_oldequiv = oldequiv;
6873 rtx real_old = old;
4eea1672 6874 rtx tmp;
1554c2c6
RK
6875
6876 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6877 and similarly for OLD.
b80bba27 6878 See comments in get_secondary_reload in reload.c. */
cb2afeb3
R
6879 /* If it is a pseudo that cannot be replaced with its
6880 equivalent MEM, we must fall back to reload_in, which
d62dab41
R
6881 will have all the necessary substitutions registered.
6882 Likewise for a pseudo that can't be replaced with its
05d10675 6883 equivalent constant.
4eea1672
RH
6884
6885 Take extra care for subregs of such pseudos. Note that
6886 we cannot use reg_equiv_mem in this case because it is
6887 not in the right mode. */
05d10675 6888
4eea1672
RH
6889 tmp = oldequiv;
6890 if (GET_CODE (tmp) == SUBREG)
6891 tmp = SUBREG_REG (tmp);
6892 if (GET_CODE (tmp) == REG
6893 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6894 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6895 || reg_equiv_constant[REGNO (tmp)] != 0))
cb2afeb3 6896 {
4eea1672
RH
6897 if (! reg_equiv_mem[REGNO (tmp)]
6898 || num_not_at_initial_offset
6899 || GET_CODE (oldequiv) == SUBREG)
cb2afeb3
R
6900 real_oldequiv = reload_in[j];
6901 else
4eea1672 6902 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
cb2afeb3 6903 }
1554c2c6 6904
4eea1672
RH
6905 tmp = old;
6906 if (GET_CODE (tmp) == SUBREG)
6907 tmp = SUBREG_REG (tmp);
6908 if (GET_CODE (tmp) == REG
6909 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6910 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6911 || reg_equiv_constant[REGNO (tmp)] != 0))
cb2afeb3 6912 {
4eea1672
RH
6913 if (! reg_equiv_mem[REGNO (tmp)]
6914 || num_not_at_initial_offset
6915 || GET_CODE (old) == SUBREG)
cb2afeb3
R
6916 real_old = reload_in[j];
6917 else
4eea1672 6918 real_old = reg_equiv_mem[REGNO (tmp)];
cb2afeb3 6919 }
1554c2c6 6920
32131a9c 6921 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 6922 icode = reload_secondary_in_icode[j];
32131a9c 6923
d445b551
RK
6924 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6925 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
6926 {
6927 enum reg_class new_class
6928 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 6929 mode, real_oldequiv);
32131a9c
RK
6930
6931 if (new_class == NO_REGS)
6932 second_reload_reg = 0;
6933 else
6934 {
6935 enum insn_code new_icode;
6936 enum machine_mode new_mode;
6937
6938 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6939 REGNO (second_reload_reg)))
1554c2c6 6940 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6941 else
6942 {
6943 new_icode = reload_in_optab[(int) mode];
6944 if (new_icode != CODE_FOR_nothing
6945 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 6946 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 6947 (reloadreg, mode)))
a8fdc208
RS
6948 || (insn_operand_predicate[(int) new_icode][1]
6949 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 6950 (real_oldequiv, mode)))))
32131a9c
RK
6951 new_icode = CODE_FOR_nothing;
6952
6953 if (new_icode == CODE_FOR_nothing)
6954 new_mode = mode;
6955 else
196ddf8a 6956 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
6957
6958 if (GET_MODE (second_reload_reg) != new_mode)
6959 {
6960 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6961 new_mode))
1554c2c6 6962 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6963 else
6964 second_reload_reg
38a448ca
RH
6965 = gen_rtx_REG (new_mode,
6966 REGNO (second_reload_reg));
32131a9c
RK
6967 }
6968 }
6969 }
6970 }
6971
6972 /* If we still need a secondary reload register, check
6973 to see if it is being used as a scratch or intermediate
1554c2c6
RK
6974 register and generate code appropriately. If we need
6975 a scratch register, use REAL_OLDEQUIV since the form of
05d10675 6976 the insn may depend on the actual address if it is
1554c2c6 6977 a MEM. */
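 /* In outline (restating the code below): when ICODE is defined the
    secondary register is a scratch, and the icode pattern itself moves
    REAL_OLDEQUIV into RELOADREG using SECOND_RELOAD_REG as the scratch;
    otherwise the secondary register is an intermediate, loaded from
    REAL_OLDEQUIV (via a tertiary scratch if one is required) and then
    used as the source of the main reload.  */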
32131a9c
RK
6978
6979 if (second_reload_reg)
6980 {
6981 if (icode != CODE_FOR_nothing)
6982 {
5e03c156
RK
6983 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6984 second_reload_reg));
32131a9c
RK
6985 special = 1;
6986 }
6987 else
6988 {
6989 /* See if we need a scratch register to load the
6990 intermediate register (a tertiary reload). */
6991 enum insn_code tertiary_icode
b80bba27 6992 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
6993
6994 if (tertiary_icode != CODE_FOR_nothing)
6995 {
6996 rtx third_reload_reg
05d10675 6997 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 6998
546b63fb
RK
6999 emit_insn ((GEN_FCN (tertiary_icode)
7000 (second_reload_reg, real_oldequiv,
7001 third_reload_reg)));
32131a9c
RK
7002 }
7003 else
cb2afeb3 7004 gen_reload (second_reload_reg, real_oldequiv,
5e03c156
RK
7005 reload_opnum[j],
7006 reload_when_needed[j]);
546b63fb
RK
7007
7008 oldequiv = second_reload_reg;
32131a9c
RK
7009 }
7010 }
7011 }
7012#endif
7013
2d182c6f 7014 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
cb2afeb3
R
7015 {
7016 rtx real_oldequiv = oldequiv;
7017
7018 if ((GET_CODE (oldequiv) == REG
7019 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
d62dab41
R
7020 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
7021 || reg_equiv_constant[REGNO (oldequiv)] != 0))
cb2afeb3
R
7022 || (GET_CODE (oldequiv) == SUBREG
7023 && GET_CODE (SUBREG_REG (oldequiv)) == REG
7024 && (REGNO (SUBREG_REG (oldequiv))
7025 >= FIRST_PSEUDO_REGISTER)
d62dab41
R
7026 && ((reg_equiv_memory_loc
7027 [REGNO (SUBREG_REG (oldequiv))] != 0)
7028 || (reg_equiv_constant
7029 [REGNO (SUBREG_REG (oldequiv))] != 0))))
cb2afeb3
R
7030 real_oldequiv = reload_in[j];
7031 gen_reload (reloadreg, real_oldequiv, reload_opnum[j],
7032 reload_when_needed[j]);
7033 }
32131a9c 7034
32131a9c
RK
7035 }
7036
80d92002 7037 this_reload_insn = get_last_insn ();
546b63fb
RK
7038 /* End this sequence. */
7039 *where = get_insns ();
7040 end_sequence ();
cb2afeb3
R
7041
7042 /* Update reload_override_in so that delete_address_reloads_1
7043 can see the actual register usage. */
7044 if (oldequiv_reg)
7045 reload_override_in[j] = oldequiv;
32131a9c
RK
7046 }
7047
b60a8416
R
7048 /* When inheriting a wider reload, we have a MEM in reload_in[j],
7049 e.g. inheriting a SImode output reload for
7050 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
7051 if (optimize && reload_inherited[j] && reload_in[j]
7052 && GET_CODE (reload_in[j]) == MEM
cb2afeb3 7053 && GET_CODE (reload_in_reg[j]) == MEM
b60a8416
R
7054 && reload_spill_index[j] >= 0
7055 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
7056 {
7057 expect_occurrences
7058 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
7059 reload_in[j]
7060 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
7061 }
32131a9c
RK
7062
7063 /* If we are reloading a register that was recently stored in with an
7064 output-reload, see if we can prove there was
7065 actually no need to store the old value in it. */
7066
cb2afeb3
R
7067 if (optimize
7068 && (reload_inherited[j] || reload_override_in[j])
7069 && reload_reg_rtx[j]
7070 && GET_CODE (reload_reg_rtx[j]) == REG
7071 && spill_reg_store[REGNO (reload_reg_rtx[j])] != 0
32131a9c
RK
7072#if 0
7073 /* There doesn't seem to be any reason to restrict this to pseudos
7074 and doing so loses in the case where we are copying from a
7075 register of the wrong class. */
05d10675
BS
7076 && (REGNO (spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
7077 >= FIRST_PSEUDO_REGISTER)
32131a9c 7078#endif
05d10675
BS
7079 /* The insn might already have some references to stack slots
7080 replaced by MEMs, while reload_out_reg still names the
7081 original pseudo. */
cb2afeb3
R
7082 && (dead_or_set_p (insn,
7083 spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
7084 || rtx_equal_p (spill_reg_stored_to[REGNO (reload_reg_rtx[j])],
7085 reload_out_reg[j])))
7086 delete_output_reload (insn, j, REGNO (reload_reg_rtx[j]));
32131a9c
RK
7087
7088 /* Input-reloading is done. Now do output-reloading,
7089 storing the value from the reload-register after the main insn
7090 if reload_out[j] is nonzero.
7091
7092 ??? At some point we need to support handling output reloads of
7093 JUMP_INSNs or insns that set cc0. */
cb2afeb3
R
7094
7095 /* If this is an output reload that stores something that is
7096 not loaded in this same reload, see if we can eliminate a previous
7097 store. */
7098 {
7099 rtx pseudo = reload_out_reg[j];
05d10675 7100
cb2afeb3
R
7101 if (pseudo
7102 && GET_CODE (pseudo) == REG
7103 && ! rtx_equal_p (reload_in_reg[j], pseudo)
7104 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7105 && reg_last_reload_reg[REGNO (pseudo)])
7106 {
7107 int pseudo_no = REGNO (pseudo);
7108 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7109
7110 /* We don't need to test full validity of last_regno for
7111 inherit here; we only want to know if the store actually
7112 matches the pseudo. */
7113 if (reg_reloaded_contents[last_regno] == pseudo_no
7114 && spill_reg_store[last_regno]
7115 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7116 delete_output_reload (insn, j, last_regno);
7117 }
7118 }
7119
7120 old = reload_out_reg[j];
32131a9c
RK
7121 if (old != 0
7122 && reload_reg_rtx[j] != old
7123 && reload_reg_rtx[j] != 0)
7124 {
7125 register rtx reloadreg = reload_reg_rtx[j];
29a82058 7126#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
32131a9c 7127 register rtx second_reloadreg = 0;
29a82058 7128#endif
32131a9c
RK
7129 rtx note, p;
7130 enum machine_mode mode;
7131 int special = 0;
7132
7133 /* An output operand that dies right away does need a reload,
7134 but need not be copied from it. Show the new location in the
7135 REG_UNUSED note. */
7136 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7137 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7138 {
7139 XEXP (note, 0) = reload_reg_rtx[j];
7140 continue;
7141 }
a7911cd2
RK
7142 /* Likewise for a SUBREG of an operand that dies. */
7143 else if (GET_CODE (old) == SUBREG
7144 && GET_CODE (SUBREG_REG (old)) == REG
7145 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7146 SUBREG_REG (old))))
7147 {
7148 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7149 reload_reg_rtx[j]);
7150 continue;
7151 }
32131a9c
RK
7152 else if (GET_CODE (old) == SCRATCH)
7153 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7154 but we don't want to make an output reload. */
7155 continue;
7156
7157#if 0
7158 /* Strip off of OLD any size-increasing SUBREGs such as
7159 (SUBREG:SI foo:QI 0). */
7160
7161 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7162 && (GET_MODE_SIZE (GET_MODE (old))
7163 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7164 old = SUBREG_REG (old);
7165#endif
7166
7167 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7168 if (GET_CODE (insn) == JUMP_INSN)
7169 abort ();
7170
d7e0324f 7171 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf 7172 start_sequence ();
d7e0324f
RK
7173 else
7174 push_to_sequence (output_reload_insns[reload_opnum[j]]);
546b63fb 7175
cb2afeb3
R
7176 old = reload_out[j];
7177
32131a9c
RK
7178 /* Determine the mode to reload in.
7179 See comments above (for input reloading). */
7180
7181 mode = GET_MODE (old);
7182 if (mode == VOIDmode)
79a365a7
RS
7183 {
7184 /* VOIDmode should never happen for an output. */
7185 if (asm_noperands (PATTERN (insn)) < 0)
7186 /* It's the compiler's fault. */
a89b2cc4 7187 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
7188 error_for_asm (insn, "output operand is constant in `asm'");
7189 /* Prevent crash--use something we know is valid. */
7190 mode = word_mode;
38a448ca 7191 old = gen_rtx_REG (mode, REGNO (reloadreg));
79a365a7 7192 }
32131a9c 7193
32131a9c 7194 if (GET_MODE (reloadreg) != mode)
38a448ca 7195 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
32131a9c
RK
7196
7197#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7198
7199 /* If we need two reload regs, set RELOADREG to the intermediate
5e03c156 7200 one, since it will be stored into OLD. We might need a secondary
32131a9c
RK
7201 register only for an input reload, so check again here. */
7202
b80bba27 7203 if (reload_secondary_out_reload[j] >= 0)
32131a9c 7204 {
1554c2c6 7205 rtx real_old = old;
32131a9c 7206
1554c2c6
RK
7207 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7208 && reg_equiv_mem[REGNO (old)] != 0)
7209 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 7210
1554c2c6
RK
7211 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
7212 mode, real_old)
7213 != NO_REGS))
7214 {
7215 second_reloadreg = reloadreg;
b80bba27 7216 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 7217
1554c2c6
RK
7218 /* See if RELOADREG is to be used as a scratch register
7219 or as an intermediate register. */
b80bba27 7220 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 7221 {
b80bba27 7222 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 7223 (real_old, second_reloadreg, reloadreg)));
1554c2c6 7224 special = 1;
32131a9c
RK
7225 }
7226 else
1554c2c6
RK
7227 {
7228 /* See if we need both a scratch and intermediate reload
7229 register. */
5e03c156 7230
b80bba27 7231 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 7232 enum insn_code tertiary_icode
b80bba27 7233 = reload_secondary_out_icode[secondary_reload];
32131a9c 7234
1554c2c6 7235 if (GET_MODE (reloadreg) != mode)
38a448ca 7236 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
1554c2c6
RK
7237
7238 if (tertiary_icode != CODE_FOR_nothing)
7239 {
7240 rtx third_reloadreg
b80bba27 7241 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
a7911cd2 7242 rtx tem;
5e03c156
RK
7243
7244 /* Copy primary reload reg to secondary reload reg.
7245 (Note that these have been swapped above, then
7246 secondary reload reg to OLD using our insn. */
7247
a7911cd2
RK
7248 /* If REAL_OLD is a paradoxical SUBREG, remove it
7249 and try to put the opposite SUBREG on
7250 RELOADREG. */
7251 if (GET_CODE (real_old) == SUBREG
7252 && (GET_MODE_SIZE (GET_MODE (real_old))
7253 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7254 && 0 != (tem = gen_lowpart_common
7255 (GET_MODE (SUBREG_REG (real_old)),
7256 reloadreg)))
7257 real_old = SUBREG_REG (real_old), reloadreg = tem;
7258
5e03c156
RK
7259 gen_reload (reloadreg, second_reloadreg,
7260 reload_opnum[j], reload_when_needed[j]);
7261 emit_insn ((GEN_FCN (tertiary_icode)
7262 (real_old, reloadreg, third_reloadreg)));
7263 special = 1;
9ad5f9f6 7264 }
5e03c156 7265
1554c2c6 7266 else
5e03c156
RK
7267 /* Copy between the reload regs here and then to
7268 OUT later. */
1554c2c6 7269
5e03c156
RK
7270 gen_reload (reloadreg, second_reloadreg,
7271 reload_opnum[j], reload_when_needed[j]);
1554c2c6 7272 }
32131a9c
RK
7273 }
7274 }
7275#endif
7276
7277 /* Output the last reload insn. */
7278 if (! special)
d7c2e385
L
7279 {
7280 rtx set;
7281
7282 /* Don't output the last reload if OLD is not the dest of
7283 INSN and is in the src and is clobbered by INSN. */
7284 if (! flag_expensive_optimizations
7285 || GET_CODE (old) != REG
7286 || !(set = single_set (insn))
7287 || rtx_equal_p (old, SET_DEST (set))
7288 || !reg_mentioned_p (old, SET_SRC (set))
7289 || !regno_clobbered_p (REGNO (old), insn))
7290 gen_reload (old, reloadreg, reload_opnum[j],
7291 reload_when_needed[j]);
7292 }
32131a9c 7293
32131a9c 7294 /* Look at all insns we emitted, just to be safe. */
546b63fb 7295 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
7296 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7297 {
e6e52be0
R
7298 rtx pat = PATTERN (p);
7299
32131a9c
RK
7300 /* If this output reload doesn't come from a spill reg,
7301 clear any memory of reloaded copies of the pseudo reg.
7302 If this output reload comes from a spill reg,
7303 reg_has_output_reload will make this do nothing. */
e6e52be0
R
7304 note_stores (pat, forget_old_reloads_1);
7305
7306 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7307 {
cb2afeb3 7308 rtx set = single_set (insn);
e6e52be0 7309 if (reload_spill_index[j] < 0
cb2afeb3
R
7310 && set
7311 && SET_SRC (set) == reload_reg_rtx[j])
e6e52be0 7312 {
cb2afeb3 7313 int src = REGNO (SET_SRC (set));
32131a9c 7314
e6e52be0
R
7315 reload_spill_index[j] = src;
7316 SET_HARD_REG_BIT (reg_is_output_reload, src);
7317 if (find_regno_note (insn, REG_DEAD, src))
7318 SET_HARD_REG_BIT (reg_reloaded_died, src);
7319 }
cb2afeb3 7320 if (REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
9da46522
R
7321 {
7322 int s = reload_secondary_out_reload[j];
cb2afeb3 7323 set = single_set (p);
9da46522
R
7324 /* If this reload copies only to the secondary reload
7325 register, the secondary reload does the actual
7326 store. */
7327 if (s >= 0 && set == NULL_RTX)
7328 ; /* We can't tell what function the secondary reload
7329 has and where the actual store to the pseudo is
7330 made; leave new_spill_reg_store alone. */
7331 else if (s >= 0
05d10675
BS
7332 && SET_SRC (set) == reload_reg_rtx[j]
7333 && SET_DEST (set) == reload_reg_rtx[s])
9da46522
R
7334 {
7335 /* Usually the next instruction will be the
7336 secondary reload insn; if we can confirm
7337 that it is, setting new_spill_reg_store to
7338 that insn will allow an extra optimization. */
7339 rtx s_reg = reload_reg_rtx[s];
7340 rtx next = NEXT_INSN (p);
7341 reload_out[s] = reload_out[j];
cb2afeb3 7342 reload_out_reg[s] = reload_out_reg[j];
9da46522
R
7343 set = single_set (next);
7344 if (set && SET_SRC (set) == s_reg
7345 && ! new_spill_reg_store[REGNO (s_reg)])
cb2afeb3
R
7346 {
7347 SET_HARD_REG_BIT (reg_is_output_reload,
7348 REGNO (s_reg));
7349 new_spill_reg_store[REGNO (s_reg)] = next;
7350 }
9da46522
R
7351 }
7352 else
cb2afeb3 7353 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = p;
9da46522 7354 }
e6e52be0 7355 }
32131a9c
RK
7356 }
7357
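 /* Note: a RELOAD_OTHER output reload was emitted into a fresh sequence
    above, so appending the previously collected insns here puts this
    reload's insns ahead of them, which is what yields the descending
    order by reload number promised in the ordering comment below.  */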
d7e0324f 7358 if (reload_when_needed[j] == RELOAD_OTHER)
befa01b9
JW
7359 {
7360 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7361 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7362 }
7363 else
7364 output_reload_insns[reload_opnum[j]] = get_insns ();
d7e0324f 7365
546b63fb 7366 end_sequence ();
32131a9c 7367 }
32131a9c
RK
7368 }
7369
546b63fb
RK
7370 /* Now write all the insns we made for reloads in the order expected by
7371 the allocation functions. Prior to the insn being reloaded, we write
7372 the following reloads:
7373
7374 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7375
2edc8d65 7376 RELOAD_OTHER reloads.
546b63fb 7377
47c8cf91
ILT
7378 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7379 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7380 RELOAD_FOR_INPUT reload for the operand.
546b63fb 7381
893bc853
RK
7382 RELOAD_FOR_OPADDR_ADDR reloads.
7383
546b63fb
RK
7384 RELOAD_FOR_OPERAND_ADDRESS reloads.
7385
7386 After the insn being reloaded, we write the following:
7387
47c8cf91
ILT
7388 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7389 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7390 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7391 reloads for the operand. The RELOAD_OTHER output reloads are
7392 output in descending order by reload number. */
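 /* Schematically, for a single-operand insn the emitted stream is
    (illustrative):

	[FOR_OTHER_ADDRESS] [RELOAD_OTHER inputs] [FOR_INPADDR_ADDRESS]
	[FOR_INPUT_ADDRESS] [FOR_INPUT] [FOR_OPADDR_ADDR]
	[FOR_OPERAND_ADDRESS]  INSN  [FOR_OUTADDR_ADDRESS]
	[FOR_OUTPUT_ADDRESS] [FOR_OUTPUT] [RELOAD_OTHER outputs]  */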
546b63fb 7393
c93b03c2
RH
7394 emit_insns_before (other_input_address_reload_insns, insn);
7395 emit_insns_before (other_input_reload_insns, insn);
546b63fb
RK
7396
7397 for (j = 0; j < reload_n_operands; j++)
7398 {
c93b03c2
RH
7399 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7400 emit_insns_before (input_address_reload_insns[j], insn);
7401 emit_insns_before (input_reload_insns[j], insn);
546b63fb
RK
7402 }
7403
c93b03c2
RH
7404 emit_insns_before (other_operand_reload_insns, insn);
7405 emit_insns_before (operand_reload_insns, insn);
546b63fb
RK
7406
7407 for (j = 0; j < reload_n_operands; j++)
7408 {
47c8cf91 7409 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
546b63fb
RK
7410 emit_insns_before (output_address_reload_insns[j], following_insn);
7411 emit_insns_before (output_reload_insns[j], following_insn);
befa01b9 7412 emit_insns_before (other_output_reload_insns[j], following_insn);
c93b03c2
RH
7413 }
7414
7415 /* Keep basic block info up to date. */
7416 if (n_basic_blocks)
7417 {
3b413743 7418 if (BLOCK_HEAD (chain->block) == insn)
05d10675 7419 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
3b413743 7420 if (BLOCK_END (chain->block) == insn)
05d10675 7421 BLOCK_END (chain->block) = PREV_INSN (following_insn);
546b63fb
RK
7422 }
7423
32131a9c
RK
7424 /* For all the spill regs newly reloaded in this instruction,
7425 record what they were reloaded from, so subsequent instructions
d445b551
RK
7426 can inherit the reloads.
7427
7428 Update spill_reg_store for the reloads of this insn.
e9e79d69 7429 Copy the elements that were updated in the loop above. */
32131a9c
RK
7430
7431 for (j = 0; j < n_reloads; j++)
7432 {
7433 register int r = reload_order[j];
7434 register int i = reload_spill_index[r];
7435
78a2bc08 7436 /* If this is a non-inherited input reload from a pseudo, we must
05d10675
BS
7437 clear any memory of a previous store to the same pseudo. Only do
7438 something if there will not be an output reload for the pseudo
7439 being reloaded. */
78a2bc08 7440 if (reload_in_reg[r] != 0
05d10675
BS
7441 && ! (reload_inherited[r] || reload_override_in[r]))
7442 {
7443 rtx reg = reload_in_reg[r];
78a2bc08 7444
05d10675 7445 if (GET_CODE (reg) == SUBREG)
78a2bc08 7446 reg = SUBREG_REG (reg);
05d10675
BS
7447
7448 if (GET_CODE (reg) == REG
78a2bc08
R
7449 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7450 && ! reg_has_output_reload[REGNO (reg)])
7451 {
7452 int nregno = REGNO (reg);
7453
7454 if (reg_last_reload_reg[nregno])
05d10675
BS
7455 {
7456 int last_regno = REGNO (reg_last_reload_reg[nregno]);
78a2bc08 7457
05d10675 7458 if (reg_reloaded_contents[last_regno] == nregno)
78a2bc08 7459 spill_reg_store[last_regno] = 0;
05d10675 7460 }
78a2bc08
R
7461 }
7462 }
05d10675 7463
e6e52be0 7464 /* I is nonneg if this reload used a register.
32131a9c 7465 If reload_reg_rtx[r] is 0, this is an optional reload
51f0c3b7 7466 that we opted to ignore. */
d445b551 7467
51f0c3b7 7468 if (i >= 0 && reload_reg_rtx[r] != 0)
32131a9c 7469 {
32131a9c 7470 int nr
e6e52be0 7471 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
32131a9c 7472 int k;
51f0c3b7
JW
7473 int part_reaches_end = 0;
7474 int all_reaches_end = 1;
32131a9c 7475
51f0c3b7
JW
7476 /* For a multi register reload, we need to check if all or part
7477 of the value lives to the end. */
32131a9c
RK
7478 for (k = 0; k < nr; k++)
7479 {
e6e52be0 7480 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
51f0c3b7
JW
7481 reload_when_needed[r]))
7482 part_reaches_end = 1;
7483 else
7484 all_reaches_end = 0;
32131a9c
RK
7485 }
7486
51f0c3b7
JW
7487 /* Ignore reloads that don't reach the end of the insn in
7488	 its entirety.  */
7489 if (all_reaches_end)
32131a9c 7490 {
51f0c3b7
JW
7491 /* First, clear out memory of what used to be in this spill reg.
7492 If consecutive registers are used, clear them all. */
d08ea79f 7493
32131a9c 7494 for (k = 0; k < nr; k++)
e6e52be0 7495 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
d08ea79f 7496
51f0c3b7 7497 /* Maybe the spill reg contains a copy of reload_out. */
cb2afeb3
R
7498 if (reload_out[r] != 0
7499 && (GET_CODE (reload_out[r]) == REG
7500#ifdef AUTO_INC_DEC
7501 || ! reload_out_reg[r]
7502#endif
7503 || GET_CODE (reload_out_reg[r]) == REG))
51f0c3b7 7504 {
cb2afeb3
R
7505 rtx out = (GET_CODE (reload_out[r]) == REG
7506 ? reload_out[r]
7507 : reload_out_reg[r]
7508 ? reload_out_reg[r]
7509/* AUTO_INC */ : XEXP (reload_in_reg[r], 0));
7510 register int nregno = REGNO (out);
51f0c3b7
JW
7511 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7512 : HARD_REGNO_NREGS (nregno,
7513 GET_MODE (reload_reg_rtx[r])));
7514
7515 spill_reg_store[i] = new_spill_reg_store[i];
cb2afeb3 7516 spill_reg_stored_to[i] = out;
51f0c3b7
JW
7517 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7518
7519 /* If NREGNO is a hard register, it may occupy more than
05d10675 7520 one register. If it does, say what is in the
51f0c3b7
JW
7521 rest of the registers assuming that both registers
7522 agree on how many words the object takes. If not,
7523 invalidate the subsequent registers. */
7524
7525 if (nregno < FIRST_PSEUDO_REGISTER)
7526 for (k = 1; k < nnr; k++)
7527 reg_last_reload_reg[nregno + k]
7528 = (nr == nnr
38a448ca
RH
7529 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7530 REGNO (reload_reg_rtx[r]) + k)
51f0c3b7
JW
7531 : 0);
7532
7533 /* Now do the inverse operation. */
7534 for (k = 0; k < nr; k++)
7535 {
e6e52be0
R
7536 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7537 reg_reloaded_contents[i + k]
51f0c3b7
JW
7538 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7539 ? nregno
7540 : nregno + k);
e6e52be0
R
7541 reg_reloaded_insn[i + k] = insn;
7542 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7543 }
7544 }
d08ea79f 7545
51f0c3b7
JW
7546 /* Maybe the spill reg contains a copy of reload_in. Only do
7547 something if there will not be an output reload for
7548 the register being reloaded. */
cb2afeb3 7549 else if (reload_out_reg[r] == 0
51f0c3b7
JW
7550 && reload_in[r] != 0
7551 && ((GET_CODE (reload_in[r]) == REG
cb2afeb3 7552 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER
51f0c3b7
JW
7553 && ! reg_has_output_reload[REGNO (reload_in[r])])
7554 || (GET_CODE (reload_in_reg[r]) == REG
cb2afeb3
R
7555 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))
7556 && ! reg_set_p (reload_reg_rtx[r], PATTERN (insn)))
51f0c3b7
JW
7557 {
7558 register int nregno;
7559 int nnr;
d445b551 7560
cb2afeb3
R
7561 if (GET_CODE (reload_in[r]) == REG
7562 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER)
51f0c3b7 7563 nregno = REGNO (reload_in[r]);
cb2afeb3 7564 else if (GET_CODE (reload_in_reg[r]) == REG)
51f0c3b7 7565 nregno = REGNO (reload_in_reg[r]);
cb2afeb3
R
7566 else
7567 nregno = REGNO (XEXP (reload_in_reg[r], 0));
d08ea79f 7568
51f0c3b7
JW
7569 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7570 : HARD_REGNO_NREGS (nregno,
7571 GET_MODE (reload_reg_rtx[r])));
05d10675 7572
51f0c3b7
JW
7573 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7574
7575 if (nregno < FIRST_PSEUDO_REGISTER)
7576 for (k = 1; k < nnr; k++)
7577 reg_last_reload_reg[nregno + k]
7578 = (nr == nnr
38a448ca
RH
7579 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7580 REGNO (reload_reg_rtx[r]) + k)
51f0c3b7
JW
7581 : 0);
7582
7583 /* Unless we inherited this reload, show we haven't
cb2afeb3
R
7584 recently done a store.
7585 Previous stores of inherited auto_inc expressions
7586 also have to be discarded. */
7587 if (! reload_inherited[r]
7588 || (reload_out[r] && ! reload_out_reg[r]))
51f0c3b7
JW
7589 spill_reg_store[i] = 0;
7590
7591 for (k = 0; k < nr; k++)
7592 {
e6e52be0
R
7593 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7594 reg_reloaded_contents[i + k]
51f0c3b7
JW
7595 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7596 ? nregno
7597 : nregno + k);
e6e52be0
R
7598 reg_reloaded_insn[i + k] = insn;
7599 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
51f0c3b7
JW
7600 }
7601 }
7602 }
d445b551 7603
51f0c3b7
JW
7604 /* However, if part of the reload reaches the end, then we must
7605 invalidate the old info for the part that survives to the end. */
7606 else if (part_reaches_end)
7607 {
546b63fb 7608 for (k = 0; k < nr; k++)
e6e52be0 7609 if (reload_reg_reaches_end_p (i + k,
51f0c3b7
JW
7610 reload_opnum[r],
7611 reload_when_needed[r]))
e6e52be0 7612 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
32131a9c
RK
7613 }
7614 }
7615
7616 /* The following if-statement was #if 0'd in 1.34 (or before...).
7617 It's reenabled in 1.35 because supposedly nothing else
7618 deals with this problem. */
7619
7620 /* If a register gets output-reloaded from a non-spill register,
7621 that invalidates any previous reloaded copy of it.
7622 But forget_old_reloads_1 won't get to see it, because
7623 it thinks only about the original insn. So invalidate it here. */
cb2afeb3
R
7624 if (i < 0 && reload_out[r] != 0
7625 && (GET_CODE (reload_out[r]) == REG
7626 || (GET_CODE (reload_out[r]) == MEM
7627 && GET_CODE (reload_out_reg[r]) == REG)))
32131a9c 7628 {
cb2afeb3
R
7629 rtx out = (GET_CODE (reload_out[r]) == REG
7630 ? reload_out[r] : reload_out_reg[r]);
7631 register int nregno = REGNO (out);
c7093272 7632 if (nregno >= FIRST_PSEUDO_REGISTER)
cb2afeb3 7633 {
6a651371 7634 rtx src_reg, store_insn = NULL_RTX;
cb2afeb3
R
7635
7636 reg_last_reload_reg[nregno] = 0;
7637
7638 /* If we can find a hard register that is stored, record
7639 the storing insn so that we may delete this insn with
7640 delete_output_reload. */
7641 src_reg = reload_reg_rtx[r];
7642
7643 /* If this is an optional reload, try to find the source reg
7644 from an input reload. */
7645 if (! src_reg)
7646 {
7647 rtx set = single_set (insn);
3d0ec3b3 7648 if (set && SET_DEST (set) == reload_out[r])
cb2afeb3
R
7649 {
7650 int k;
7651
7652 src_reg = SET_SRC (set);
7653 store_insn = insn;
7654 for (k = 0; k < n_reloads; k++)
7655 {
7656 if (reload_in[k] == src_reg)
7657 {
7658 src_reg = reload_reg_rtx[k];
7659 break;
7660 }
7661 }
7662 }
7663 }
7664 else
7665 store_insn = new_spill_reg_store[REGNO (src_reg)];
7666 if (src_reg && GET_CODE (src_reg) == REG
7667 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7668 {
7669 int src_regno = REGNO (src_reg);
7670 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]);
7671		  /* Where to find a death note varies with
7672		     PRESERVE_DEATH_INFO_REGNO_P.  The condition is not
7673 necessarily checked exactly in the code that moves
7674 notes, so just check both locations. */
7675 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7676 if (! note)
7677 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7678 while (nr-- > 0)
7679 {
7680 spill_reg_store[src_regno + nr] = store_insn;
7681 spill_reg_stored_to[src_regno + nr] = out;
7682 reg_reloaded_contents[src_regno + nr] = nregno;
7683 reg_reloaded_insn[src_regno + nr] = store_insn;
00f9f1bc 7684 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
cb2afeb3
R
7685 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7686 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7687 if (note)
7688 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7689 else
7690 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7691 }
7692 reg_last_reload_reg[nregno] = src_reg;
7693 }
7694 }
c7093272
RK
7695 else
7696 {
7697	      int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
36281332 7698
c7093272
RK
7699 while (num_regs-- > 0)
7700 reg_last_reload_reg[nregno + num_regs] = 0;
7701 }
32131a9c
RK
7702 }
7703 }
e6e52be0 7704 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
32131a9c
RK
7705}
7706\f
5e03c156
RK
7707/* Emit code to perform a reload from IN (which may be a reload register) to
7708 OUT (which may also be a reload register). IN or OUT is from operand
05d10675 7709 OPNUM with reload type TYPE.
546b63fb 7710
3c3eeea6 7711 Returns first insn emitted. */
32131a9c
RK
7712
7713rtx
5e03c156
RK
7714gen_reload (out, in, opnum, type)
7715 rtx out;
32131a9c 7716 rtx in;
546b63fb
RK
7717 int opnum;
7718 enum reload_type type;
32131a9c 7719{
546b63fb 7720 rtx last = get_last_insn ();
7a5b18b0
RK
7721 rtx tem;
7722
7723 /* If IN is a paradoxical SUBREG, remove it and try to put the
7724 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7725 if (GET_CODE (in) == SUBREG
7726 && (GET_MODE_SIZE (GET_MODE (in))
7727 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7728 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7729 in = SUBREG_REG (in), out = tem;
7730 else if (GET_CODE (out) == SUBREG
7731 && (GET_MODE_SIZE (GET_MODE (out))
7732 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7733 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7734 out = SUBREG_REG (out), in = tem;
32131a9c 7735
a8fdc208 7736 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
7737 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7738 register that didn't get a hard register. In that case we can just
7739 call emit_move_insn.
7740
a7fd196c
JW
7741 We can also be asked to reload a PLUS that adds a register or a MEM to
7742 another register, constant or MEM. This can occur during frame pointer
7743 elimination and while reloading addresses. This case is handled by
7744 trying to emit a single insn to perform the add. If it is not valid,
7745 we use a two insn sequence.
32131a9c
RK
7746
7747 Finally, we could be called to handle an 'o' constraint by putting
7748 an address into a register. In that case, we first try to do this
7749 with a named pattern of "reload_load_address". If no such pattern
7750 exists, we just emit a SET insn and hope for the best (it will normally
7751 be valid on machines that use 'o').
7752
7753     This entire process is made complex by the fact that reload will never
7754     process the insns we generate here, so we must ensure that they
7755     will fit their constraints, and also by the fact that parts of
7756 IN might be being reloaded separately and replaced with spill registers.
7757 Because of this, we are, in some sense, just guessing the right approach
7758 here. The one listed above seems to work.
7759
7760 ??? At some point, this whole thing needs to be rethought. */
7761
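  /* Concretely (the register numbers are made up), reloading
     IN == (plus:SI (reg:SI 6) (const_int 8)) into OUT == (reg:SI 2)
     first tries the single insn

	(set (reg:SI 2) (plus:SI (reg:SI 6) (const_int 8)))

     and keeps it only if recog_memoized and constrain_operands accept it;
     otherwise the move/add fallbacks further down are used.  */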
7762 if (GET_CODE (in) == PLUS
a7fd196c 7763 && (GET_CODE (XEXP (in, 0)) == REG
5c6b1bd2 7764 || GET_CODE (XEXP (in, 0)) == SUBREG
a7fd196c
JW
7765 || GET_CODE (XEXP (in, 0)) == MEM)
7766 && (GET_CODE (XEXP (in, 1)) == REG
5c6b1bd2 7767 || GET_CODE (XEXP (in, 1)) == SUBREG
a7fd196c
JW
7768 || CONSTANT_P (XEXP (in, 1))
7769 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 7770 {
a7fd196c
JW
7771 /* We need to compute the sum of a register or a MEM and another
7772 register, constant, or MEM, and put it into the reload
3002e160
JW
7773 register. The best possible way of doing this is if the machine
7774 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
7775
7776 The simplest approach is to try to generate such an insn and see if it
7777 is recognized and matches its constraints. If so, it can be used.
7778
7779 It might be better not to actually emit the insn unless it is valid,
0009eff2 7780 but we need to pass the insn as an operand to `recog' and
0eadeb15 7781 `extract_insn' and it is simpler to emit and then delete the insn if
0009eff2 7782 not valid than to dummy things up. */
a8fdc208 7783
af929c62 7784 rtx op0, op1, tem, insn;
32131a9c 7785 int code;
a8fdc208 7786
af929c62
RK
7787 op0 = find_replacement (&XEXP (in, 0));
7788 op1 = find_replacement (&XEXP (in, 1));
7789
32131a9c
RK
7790 /* Since constraint checking is strict, commutativity won't be
7791 checked, so we need to do that here to avoid spurious failure
7792 if the add instruction is two-address and the second operand
7793 of the add is the same as the reload reg, which is frequently
7794 the case. If the insn would be A = B + A, rearrange it so
0f41302f 7795 it will be A = A + B as constrain_operands expects. */
a8fdc208 7796
32131a9c 7797 if (GET_CODE (XEXP (in, 1)) == REG
5e03c156 7798 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
7799 tem = op0, op0 = op1, op1 = tem;
7800
7801 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
38a448ca 7802 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
32131a9c 7803
38a448ca 7804 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7805 code = recog_memoized (insn);
7806
7807 if (code >= 0)
7808 {
0eadeb15 7809 extract_insn (insn);
32131a9c
RK
7810 /* We want constrain operands to treat this insn strictly in
7811 its validity determination, i.e., the way it would after reload
7812 has completed. */
0eadeb15 7813 if (constrain_operands (1))
32131a9c
RK
7814 return insn;
7815 }
7816
546b63fb 7817 delete_insns_since (last);
32131a9c
RK
7818
7819 /* If that failed, we must use a conservative two-insn sequence.
09522f21
FS
7820
7821 Use a move to copy one operand into the reload register. Prefer
7822 to reload a constant, MEM or pseudo since the move patterns can
7823 handle an arbitrary operand. If OP1 is not a constant, MEM or
7824 pseudo and OP1 is not a valid operand for an add instruction, then
7825 reload OP1.
7826
7827 After reloading one of the operands into the reload register, add
7828 the reload register to the output register.
32131a9c
RK
7829
7830 If there is another way to do this for a specific machine, a
7831 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7832 we emit below. */
7833
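      /* For instance (register numbers made up), with
	 IN == (plus:SI (reg:SI 6) (const_int 8)) and OUT == (reg:SI 2),
	 the fallback moves the constant first and adds the register:

	    (set (reg:SI 2) (const_int 8))
	    (set (reg:SI 2) (plus:SI (reg:SI 2) (reg:SI 6)))

	 and if even that add is not recognized, the operands are tried in
	 the opposite order; either fallback gets a REG_EQUIV note recording
	 the original sum for find_equiv_reg.  */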
09522f21
FS
7834 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7835
5c6b1bd2 7836 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
af929c62 7837 || (GET_CODE (op1) == REG
09522f21
FS
7838 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7839 || (code != CODE_FOR_nothing
7840 && ! (*insn_operand_predicate[code][2]) (op1, insn_operand_mode[code][2])))
af929c62 7841 tem = op0, op0 = op1, op1 = tem;
32131a9c 7842
5c6b1bd2 7843 gen_reload (out, op0, opnum, type);
39b56c2a 7844
5e03c156 7845 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
7846 This fixes a problem on the 32K where the stack pointer cannot
7847 be used as an operand of an add insn. */
7848
7849 if (rtx_equal_p (op0, op1))
5e03c156 7850 op1 = out;
39b56c2a 7851
5e03c156 7852 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
7853
7854 /* If that failed, copy the address register to the reload register.
0f41302f 7855 Then add the constant to the reload register. */
c77c9766
RK
7856
7857 code = recog_memoized (insn);
7858
7859 if (code >= 0)
7860 {
0eadeb15 7861 extract_insn (insn);
c77c9766
RK
7862 /* We want constrain operands to treat this insn strictly in
7863 its validity determination, i.e., the way it would after reload
7864 has completed. */
0eadeb15 7865 if (constrain_operands (1))
4117a96b
R
7866 {
7867 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7868 REG_NOTES (insn)
9e6a5703 7869 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
4117a96b
R
7870 return insn;
7871 }
c77c9766
RK
7872 }
7873
7874 delete_insns_since (last);
7875
5c6b1bd2 7876 gen_reload (out, op1, opnum, type);
4117a96b 7877 insn = emit_insn (gen_add2_insn (out, op0));
9e6a5703 7878 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
32131a9c
RK
7879 }
7880
0dadecf6
RK
7881#ifdef SECONDARY_MEMORY_NEEDED
7882 /* If we need a memory location to do the move, do it that way. */
7883 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5e03c156 7884 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
0dadecf6 7885 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5e03c156
RK
7886 REGNO_REG_CLASS (REGNO (out)),
7887 GET_MODE (out)))
0dadecf6
RK
7888 {
7889 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 7890 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 7891
5e03c156 7892 if (GET_MODE (loc) != GET_MODE (out))
38a448ca 7893 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
0dadecf6
RK
7894
7895 if (GET_MODE (loc) != GET_MODE (in))
38a448ca 7896 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
0dadecf6 7897
5c6b1bd2
RK
7898 gen_reload (loc, in, opnum, type);
7899 gen_reload (out, loc, opnum, type);
0dadecf6
RK
7900 }
7901#endif
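  /* Sketch of the SECONDARY_MEMORY_NEEDED path above (the register classes
     and the slot are hypothetical): when IN and OUT are hard registers that
     cannot be copied directly, the two recursive gen_reload calls produce
     roughly

	(set (mem slot) (reg in))
	(set (reg out) (mem slot))

     in the mode of the secondary memory location from get_secondary_mem.  */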
7902
32131a9c
RK
7903 /* If IN is a simple operand, use gen_move_insn. */
7904 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5e03c156 7905 emit_insn (gen_move_insn (out, in));
32131a9c
RK
7906
7907#ifdef HAVE_reload_load_address
7908 else if (HAVE_reload_load_address)
5e03c156 7909 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
7910#endif
7911
5e03c156 7912 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 7913 else
38a448ca 7914 emit_insn (gen_rtx_SET (VOIDmode, out, in));
32131a9c
RK
7915
7916 /* Return the first insn emitted.
546b63fb 7917 We can not just return get_last_insn, because there may have
32131a9c
RK
7918 been multiple instructions emitted. Also note that gen_move_insn may
7919 emit more than one insn itself, so we can not assume that there is one
7920 insn emitted per emit_insn_before call. */
7921
546b63fb 7922 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
7923}
7924\f
7925/* Delete a previously made output-reload
7926 whose result we now believe is not needed.
7927 First we double-check.
7928
7929 INSN is the insn now being processed.
cb2afeb3
R
7930 LAST_RELOAD_REG is the hard register number for which we want to delete
7931 the last output reload.
7932 J is the reload-number that originally used REG. The caller has made
7933 certain that reload J doesn't use REG any longer for input. */
32131a9c
RK
7934
7935static void
cb2afeb3 7936delete_output_reload (insn, j, last_reload_reg)
32131a9c
RK
7937 rtx insn;
7938 int j;
cb2afeb3 7939 int last_reload_reg;
32131a9c 7940{
cb2afeb3
R
7941 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7942 rtx reg = spill_reg_stored_to[last_reload_reg];
7943 int k;
7944 int n_occurrences;
7945 int n_inherited = 0;
32131a9c 7946 register rtx i1;
cb2afeb3 7947 rtx substed;
05d10675 7948
32131a9c
RK
7949 /* Get the raw pseudo-register referred to. */
7950
32131a9c
RK
7951 while (GET_CODE (reg) == SUBREG)
7952 reg = SUBREG_REG (reg);
cb2afeb3
R
7953 substed = reg_equiv_memory_loc[REGNO (reg)];
7954
7955 /* This is unsafe if the operand occurs more often in the current
7956 insn than it is inherited. */
7957 for (k = n_reloads - 1; k >= 0; k--)
7958 {
7959 rtx reg2 = reload_in[k];
7960 if (! reg2)
7961 continue;
7962 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7963 reg2 = reload_in_reg[k];
7964#ifdef AUTO_INC_DEC
7965 if (reload_out[k] && ! reload_out_reg[k])
7966 reg2 = XEXP (reload_in_reg[k], 0);
7967#endif
7968 while (GET_CODE (reg2) == SUBREG)
7969 reg2 = SUBREG_REG (reg2);
7970 if (rtx_equal_p (reg2, reg))
2eb6dac7
AS
7971 {
7972 if (reload_inherited[k] || reload_override_in[k] || k == j)
7973 {
cb2afeb3 7974 n_inherited++;
2eb6dac7
AS
7975 reg2 = reload_out_reg[k];
7976 if (! reg2)
7977 continue;
7978 while (GET_CODE (reg2) == SUBREG)
7979 reg2 = XEXP (reg2, 0);
7980 if (rtx_equal_p (reg2, reg))
7981 n_inherited++;
7982 }
7983 else
7984 return;
7985 }
cb2afeb3
R
7986 }
7987 n_occurrences = count_occurrences (PATTERN (insn), reg);
7988 if (substed)
7989 n_occurrences += count_occurrences (PATTERN (insn), substed);
7990 if (n_occurrences > n_inherited)
7991 return;
32131a9c
RK
7992
7993 /* If the pseudo-reg we are reloading is no longer referenced
7994 anywhere between the store into it and here,
7995 and no jumps or labels intervene, then the value can get
7996 here through the reload reg alone.
7997 Otherwise, give up--return. */
7998 for (i1 = NEXT_INSN (output_reload_insn);
7999 i1 != insn; i1 = NEXT_INSN (i1))
8000 {
8001 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
8002 return;
8003 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
8004 && reg_mentioned_p (reg, PATTERN (i1)))
aa6498c2 8005 {
cb2afeb3
R
8006 /* If this is USE in front of INSN, we only have to check that
8007 there are no more references than accounted for by inheritance. */
8008 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
aa6498c2 8009 {
cb2afeb3 8010 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
aa6498c2
R
8011 i1 = NEXT_INSN (i1);
8012 }
cb2afeb3 8013 if (n_occurrences <= n_inherited && i1 == insn)
aa6498c2
R
8014 break;
8015 return;
8016 }
32131a9c
RK
8017 }
8018
aa6498c2
R
8019 /* The caller has already checked that REG dies or is set in INSN.
8020     It has also checked that we are optimizing, and thus some inaccuracies
8021 in the debugging information are acceptable.
8022 So we could just delete output_reload_insn.
8023 But in some cases we can improve the debugging information without
8024 sacrificing optimization - maybe even improving the code:
8025 See if the pseudo reg has been completely replaced
32131a9c
RK
8026 with reload regs. If so, delete the store insn
8027 and forget we had a stack slot for the pseudo. */
aa6498c2
R
8028 if (reload_out[j] != reload_in[j]
8029 && REG_N_DEATHS (REGNO (reg)) == 1
a3a24aa6 8030 && REG_N_SETS (REGNO (reg)) == 1
aa6498c2
R
8031 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
8032 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
32131a9c
RK
8033 {
8034 rtx i2;
8035
8036 /* We know that it was used only between here
8037 and the beginning of the current basic block.
8038 (We also know that the last use before INSN was
8039 the output reload we are thinking of deleting, but never mind that.)
8040 Search that range; see if any ref remains. */
8041 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8042 {
d445b551
RK
8043 rtx set = single_set (i2);
8044
32131a9c
RK
8045 /* Uses which just store in the pseudo don't count,
8046 since if they are the only uses, they are dead. */
d445b551 8047 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
8048 continue;
8049 if (GET_CODE (i2) == CODE_LABEL
8050 || GET_CODE (i2) == JUMP_INSN)
8051 break;
8052 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
8053 && reg_mentioned_p (reg, PATTERN (i2)))
aa6498c2
R
8054 {
8055 /* Some other ref remains; just delete the output reload we
8056 know to be dead. */
cb2afeb3
R
8057 delete_address_reloads (output_reload_insn, insn);
8058 PUT_CODE (output_reload_insn, NOTE);
8059 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8060 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
aa6498c2
R
8061 return;
8062 }
32131a9c
RK
8063 }
8064
8065 /* Delete the now-dead stores into this pseudo. */
8066 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
8067 {
d445b551
RK
8068 rtx set = single_set (i2);
8069
8070 if (set != 0 && SET_DEST (set) == reg)
5507b94b 8071 {
cb2afeb3 8072 delete_address_reloads (i2, insn);
5507b94b
RK
8073 /* This might be a basic block head,
8074 thus don't use delete_insn. */
8075 PUT_CODE (i2, NOTE);
8076 NOTE_SOURCE_FILE (i2) = 0;
8077 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
8078 }
32131a9c
RK
8079 if (GET_CODE (i2) == CODE_LABEL
8080 || GET_CODE (i2) == JUMP_INSN)
8081 break;
8082 }
8083
8084 /* For the debugging info,
8085 say the pseudo lives in this reload reg. */
8086 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
8087 alter_reg (REGNO (reg), -1);
8088 }
cb2afeb3
R
8089 delete_address_reloads (output_reload_insn, insn);
8090 PUT_CODE (output_reload_insn, NOTE);
8091 NOTE_SOURCE_FILE (output_reload_insn) = 0;
8092 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
8093
8094}
8095
8096/* We are going to delete DEAD_INSN. Recursively delete loads of
8097 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
8098 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
8099static void
8100delete_address_reloads (dead_insn, current_insn)
8101 rtx dead_insn, current_insn;
8102{
8103 rtx set = single_set (dead_insn);
8104 rtx set2, dst, prev, next;
8105 if (set)
8106 {
8107 rtx dst = SET_DEST (set);
8108 if (GET_CODE (dst) == MEM)
8109 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
8110 }
8111 /* If we deleted the store from a reloaded post_{in,de}c expression,
8112 we can delete the matching adds. */
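  /* E.g. (the register and increment are made up) a store through a reloaded
     (post_inc (reg 4)) ends up bracketed by a matching pair of adjustments
     such as

	(set (reg 4) (plus (reg 4) (const_int 4)))
	(set (reg 4) (plus (reg 4) (const_int -4)))

     and once the store itself is deleted the two adjustments cancel, which
     is what the checks below verify before deleting them as well.  */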
8113 prev = PREV_INSN (dead_insn);
8114 next = NEXT_INSN (dead_insn);
8115 if (! prev || ! next)
8116 return;
8117 set = single_set (next);
8118 set2 = single_set (prev);
8119 if (! set || ! set2
8120 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8121 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8122 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8123 return;
8124 dst = SET_DEST (set);
8125 if (! rtx_equal_p (dst, SET_DEST (set2))
8126 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8127 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8128 || (INTVAL (XEXP (SET_SRC (set), 1))
8129 != - INTVAL (XEXP (SET_SRC (set2), 1))))
8130 return;
8131 delete_insn (prev);
8132 delete_insn (next);
8133}
8134
8135/* Subfunction of delete_address_reloads: process registers found in X. */
8136static void
8137delete_address_reloads_1 (dead_insn, x, current_insn)
8138 rtx dead_insn, x, current_insn;
8139{
8140 rtx prev, set, dst, i2;
8141 int i, j;
8142 enum rtx_code code = GET_CODE (x);
8143
8144 if (code != REG)
8145 {
6f7d635c 8146	      const char *fmt = GET_RTX_FORMAT (code);
cb2afeb3
R
8147 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8148 {
8149 if (fmt[i] == 'e')
8150 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8151 else if (fmt[i] == 'E')
8152 {
8153 for (j = XVECLEN (x, i) - 1; j >=0; j--)
8154 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8155 current_insn);
8156 }
8157 }
8158 return;
8159 }
8160
8161 if (spill_reg_order[REGNO (x)] < 0)
8162 return;
aa6498c2 8163
cb2afeb3
R
8164	  /* Scan backwards for the insn that sets x.  This might be a long way back due
8165 to inheritance. */
8166 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8167 {
8168 code = GET_CODE (prev);
8169 if (code == CODE_LABEL || code == JUMP_INSN)
8170 return;
8171 if (GET_RTX_CLASS (code) != 'i')
8172 continue;
8173 if (reg_set_p (x, PATTERN (prev)))
8174 break;
8175 if (reg_referenced_p (x, PATTERN (prev)))
8176 return;
8177 }
8178 if (! prev || INSN_UID (prev) < reload_first_uid)
8179 return;
8180 /* Check that PREV only sets the reload register. */
8181 set = single_set (prev);
8182 if (! set)
8183 return;
8184 dst = SET_DEST (set);
8185 if (GET_CODE (dst) != REG
8186 || ! rtx_equal_p (dst, x))
8187 return;
8188 if (! reg_set_p (dst, PATTERN (dead_insn)))
8189 {
8190 /* Check if DST was used in a later insn -
8191 it might have been inherited. */
8192 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8193 {
8194 if (GET_CODE (i2) == CODE_LABEL)
8195 break;
8196 if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
8197 continue;
8198 if (reg_referenced_p (dst, PATTERN (i2)))
8199 {
8200 /* If there is a reference to the register in the current insn,
8201 it might be loaded in a non-inherited reload. If no other
8202 reload uses it, that means the register is set before
8203 referenced. */
8204 if (i2 == current_insn)
8205 {
8206 for (j = n_reloads - 1; j >= 0; j--)
8207 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8208 || reload_override_in[j] == dst)
8209 return;
8210 for (j = n_reloads - 1; j >= 0; j--)
8211 if (reload_in[j] && reload_reg_rtx[j] == dst)
8212 break;
8213 if (j >= 0)
8214 break;
8215 }
8216 return;
8217 }
8218 if (GET_CODE (i2) == JUMP_INSN)
8219 break;
cb2afeb3 8220 /* If DST is still live at CURRENT_INSN, check if it is used for
3900dc09
R
8221 any reload. Note that even if CURRENT_INSN sets DST, we still
8222 have to check the reloads. */
cb2afeb3
R
8223 if (i2 == current_insn)
8224 {
8225 for (j = n_reloads - 1; j >= 0; j--)
8226 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8227 || reload_override_in[j] == dst)
8228 return;
8229 /* ??? We can't finish the loop here, because dst might be
8230 allocated to a pseudo in this block if no reload in this
8231		     block needs any of the classes containing DST - see
8232 spill_hard_reg. There is no easy way to tell this, so we
8233 have to scan till the end of the basic block. */
8234 }
3900dc09
R
8235 if (reg_set_p (dst, PATTERN (i2)))
8236 break;
cb2afeb3
R
8237 }
8238 }
8239 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8240 reg_reloaded_contents[REGNO (dst)] = -1;
8241 /* Can't use delete_insn here because PREV might be a basic block head. */
8242 PUT_CODE (prev, NOTE);
8243 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
8244 NOTE_SOURCE_FILE (prev) = 0;
32131a9c 8245}
32131a9c 8246\f
a8fdc208 8247/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 8248 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
8249 is a register or memory location;
8250 so reloading involves incrementing that location.
cb2afeb3 8251 IN is either identical to VALUE, or some cheaper place to reload from.
32131a9c
RK
8252
8253 INC_AMOUNT is the number to increment or decrement by (always positive).
cb2afeb3 8254 This cannot be deduced from VALUE.
32131a9c 8255
cb2afeb3
R
8256 Return the instruction that stores into RELOADREG. */
8257
8258static rtx
8259inc_for_reload (reloadreg, in, value, inc_amount)
32131a9c 8260 rtx reloadreg;
cb2afeb3 8261 rtx in, value;
32131a9c 8262 int inc_amount;
32131a9c
RK
8263{
8264 /* REG or MEM to be copied and incremented. */
8265 rtx incloc = XEXP (value, 0);
8266 /* Nonzero if increment after copying. */
8267 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 8268 rtx last;
0009eff2
RK
8269 rtx inc;
8270 rtx add_insn;
8271 int code;
cb2afeb3
R
8272 rtx store;
8273 rtx real_in = in == value ? XEXP (in, 0) : in;
32131a9c
RK
8274
8275 /* No hard register is equivalent to this register after
8276 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8277 we could inc/dec that register as well (maybe even using it for
8278 the source), but I'm not sure it's worth worrying about. */
8279 if (GET_CODE (incloc) == REG)
8280 reg_last_reload_reg[REGNO (incloc)] = 0;
8281
8282 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8283 inc_amount = - inc_amount;
8284
fb3821f7 8285 inc = GEN_INT (inc_amount);
0009eff2
RK
8286
8287 /* If this is post-increment, first copy the location to the reload reg. */
cb2afeb3
R
8288 if (post && real_in != reloadreg)
8289 emit_insn (gen_move_insn (reloadreg, real_in));
0009eff2 8290
cb2afeb3
R
8291 if (in == value)
8292 {
8293 /* See if we can directly increment INCLOC. Use a method similar to
8294 that in gen_reload. */
0009eff2 8295
cb2afeb3
R
8296 last = get_last_insn ();
8297 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8298 gen_rtx_PLUS (GET_MODE (incloc),
8299 incloc, inc)));
05d10675 8300
cb2afeb3
R
8301 code = recog_memoized (add_insn);
8302 if (code >= 0)
32131a9c 8303 {
0eadeb15
BS
8304 extract_insn (add_insn);
8305 if (constrain_operands (1))
cb2afeb3
R
8306 {
8307 /* If this is a pre-increment and we have incremented the value
8308 where it lives, copy the incremented value to RELOADREG to
8309 be used as an address. */
0009eff2 8310
cb2afeb3
R
8311 if (! post)
8312 emit_insn (gen_move_insn (reloadreg, incloc));
546b63fb 8313
cb2afeb3
R
8314 return add_insn;
8315 }
32131a9c 8316 }
cb2afeb3 8317 delete_insns_since (last);
32131a9c 8318 }
0009eff2 8319
0009eff2
RK
8320   /* If we couldn't do the increment directly, we must increment in RELOADREG.
8321 The way we do this depends on whether this is pre- or post-increment.
8322 For pre-increment, copy INCLOC to the reload register, increment it
8323 there, then save back. */
8324
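  /* Pre-increment sketch (register numbers and increment made up), for
     VALUE == (pre_inc (reg 4)) and RELOADREG == (reg 2):

	(set (reg 2) (reg 4))
	(set (reg 2) (plus (reg 2) (const_int 4)))
	(set (reg 4) (reg 2))

     which leaves the incremented address both in INCLOC and in RELOADREG.  */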
8325 if (! post)
8326 {
cb2afeb3
R
8327 if (in != reloadreg)
8328 emit_insn (gen_move_insn (reloadreg, real_in));
546b63fb 8329 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 8330 store = emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 8331 }
32131a9c
RK
8332 else
8333 {
0009eff2
RK
8334 /* Postincrement.
8335 Because this might be a jump insn or a compare, and because RELOADREG
8336 may not be available after the insn in an input reload, we must do
8337 the incrementation before the insn being reloaded for.
8338
cb2afeb3 8339 We have already copied IN to RELOADREG. Increment the copy in
0009eff2
RK
8340 RELOADREG, save that back, then decrement RELOADREG so it has
8341 the original value. */
8342
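      /* Post-increment sketch (with VALUE == (post_inc (reg 4)) and
	 RELOADREG == (reg 2), numbers made up): the old address was already
	 copied into RELOADREG above, so this arm emits

	    (set (reg 2) (plus (reg 2) (const_int 4)))
	    (set (reg 4) (reg 2))
	    (set (reg 2) (plus (reg 2) (const_int -4)))

	 leaving INCLOC incremented while RELOADREG keeps the original
	 address for use by INSN.  */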
546b63fb 8343 emit_insn (gen_add2_insn (reloadreg, inc));
cb2afeb3 8344 store = emit_insn (gen_move_insn (incloc, reloadreg));
546b63fb 8345 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 8346 }
0009eff2 8347
cb2afeb3 8348 return store;
32131a9c
RK
8349}
8350\f
8351/* Return 1 if we are certain that the constraint-string STRING allows
8352 the hard register REG. Return 0 if we can't be sure of this. */
8353
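/* For example (purely illustrative), a general-class hard register is
   accepted for the string "r,g", since every alternative allows a general
   register, but not for "r,m": the second alternative gives no such
   certainty.  */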
8354static int
8355constraint_accepts_reg_p (string, reg)
9b3142b3 8356 const char *string;
32131a9c
RK
8357 rtx reg;
8358{
8359 int value = 0;
8360 int regno = true_regnum (reg);
8361 int c;
8362
8363 /* Initialize for first alternative. */
8364 value = 0;
8365 /* Check that each alternative contains `g' or `r'. */
8366 while (1)
8367 switch (c = *string++)
8368 {
8369 case 0:
8370 /* If an alternative lacks `g' or `r', we lose. */
8371 return value;
8372 case ',':
8373 /* If an alternative lacks `g' or `r', we lose. */
8374 if (value == 0)
8375 return 0;
8376 /* Initialize for next alternative. */
8377 value = 0;
8378 break;
8379 case 'g':
8380 case 'r':
8381 /* Any general reg wins for this alternative. */
8382 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8383 value = 1;
8384 break;
8385 default:
8386 /* Any reg in specified class wins for this alternative. */
8387 {
0009eff2 8388 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 8389
0009eff2 8390 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
8391 value = 1;
8392 }
8393 }
8394}
8395\f
d445b551
RK
8396/* Return the number of places FIND appears within X, but don't count
8397 an occurrence if some SET_DEST is FIND. */
32131a9c 8398
184bb750 8399int
32131a9c
RK
8400count_occurrences (x, find)
8401 register rtx x, find;
8402{
8403 register int i, j;
8404 register enum rtx_code code;
6f7d635c 8405 register const char *format_ptr;
32131a9c
RK
8406 int count;
8407
8408 if (x == find)
8409 return 1;
8410 if (x == 0)
8411 return 0;
8412
8413 code = GET_CODE (x);
8414
8415 switch (code)
8416 {
8417 case REG:
8418 case QUEUED:
8419 case CONST_INT:
8420 case CONST_DOUBLE:
8421 case SYMBOL_REF:
8422 case CODE_LABEL:
8423 case PC:
8424 case CC0:
8425 return 0;
d445b551 8426
cb2afeb3
R
8427 case MEM:
8428 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8429 return 1;
8430 break;
d445b551
RK
8431 case SET:
8432 if (SET_DEST (x) == find)
8433 return count_occurrences (SET_SRC (x), find);
8434 break;
05d10675 8435
e9a25f70
JL
8436 default:
8437 break;
32131a9c
RK
8438 }
8439
8440 format_ptr = GET_RTX_FORMAT (code);
8441 count = 0;
8442
8443 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8444 {
8445 switch (*format_ptr++)
8446 {
8447 case 'e':
8448 count += count_occurrences (XEXP (x, i), find);
8449 break;
8450
8451 case 'E':
8452 if (XVEC (x, i) != NULL)
8453 {
8454 for (j = 0; j < XVECLEN (x, i); j++)
8455 count += count_occurrences (XVECEXP (x, i, j), find);
8456 }
8457 break;
8458 }
8459 }
8460 return count;
8461}
2a9fb548
ILT
8462\f
8463/* This array holds values which are equivalent to a hard register
8464 during reload_cse_regs. Each array element is an EXPR_LIST of
8465 values. Each time a hard register is set, we set the corresponding
8466 array element to the value. Each time a hard register is copied
8467 into memory, we add the memory location to the corresponding array
8468 element. We don't store values or memory addresses with side
8469 effects in this array.
8470
8471 If the value is a CONST_INT, then the mode of the containing
8472 EXPR_LIST is the mode in which that CONST_INT was referenced.
8473
8474 We sometimes clobber a specific entry in a list. In that case, we
8475 just set XEXP (list-entry, 0) to 0. */
8476
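/* Illustration (values invented): after

	(set (reg:SI 3) (const_int 5))
	(set (mem:SI (reg:SI 6)) (reg:SI 3))

   reg_values[3] is an EXPR_LIST holding both (const_int 5) and
   (mem:SI (reg:SI 6)), either of which may later stand in for hard reg 3.  */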
8477static rtx *reg_values;
8478
ba325eba
ILT
8479/* This is a preallocated REG rtx which we use as a temporary in
8480 reload_cse_invalidate_regno, so that we don't need to allocate a
8481 new one each time through a loop in that function. */
8482
8483static rtx invalidate_regno_rtx;
8484
2a9fb548
ILT
8485/* Invalidate any entries in reg_values which depend on REGNO,
8486 including those for REGNO itself. This is called if REGNO is
8487 changing. If CLOBBER is true, then always forget anything we
8488 currently know about REGNO. MODE is the mode of the assignment to
8489 REGNO, which is used to determine how many hard registers are being
8490 changed. If MODE is VOIDmode, then only REGNO is being changed;
8491 this is used when invalidating call clobbered registers across a
8492 call. */
8493
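/* Illustration (sizes invented): with CLOBBER set, invalidating regno 3 for
   a DImode assignment wipes reg_values[3] and reg_values[4] (assuming DImode
   needs two hard registers here), and in any case clears every list entry
   anywhere whose recorded value mentions one of those registers.  */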
8494static void
8495reload_cse_invalidate_regno (regno, mode, clobber)
8496 int regno;
8497 enum machine_mode mode;
8498 int clobber;
8499{
8500 int endregno;
8501 register int i;
8502
8503 /* Our callers don't always go through true_regnum; we may see a
8504 pseudo-register here from a CLOBBER or the like. We probably
8505 won't ever see a pseudo-register that has a real register number,
8506     but we check anyhow for safety.  */
8507 if (regno >= FIRST_PSEUDO_REGISTER)
8508 regno = reg_renumber[regno];
8509 if (regno < 0)
8510 return;
8511
8512 if (mode == VOIDmode)
8513 endregno = regno + 1;
8514 else
8515 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8516
8517 if (clobber)
8518 for (i = regno; i < endregno; i++)
8519 reg_values[i] = 0;
8520
8521 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8522 {
8523 rtx x;
8524
8525 for (x = reg_values[i]; x; x = XEXP (x, 1))
8526 {
8527 if (XEXP (x, 0) != 0
9e148ceb 8528 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
2a9fb548
ILT
8529 {
8530 /* If this is the only entry on the list, clear
05d10675
BS
8531 reg_values[i]. Otherwise, just clear this entry on
8532 the list. */
2a9fb548
ILT
8533 if (XEXP (x, 1) == 0 && x == reg_values[i])
8534 {
8535 reg_values[i] = 0;
8536 break;
8537 }
8538 XEXP (x, 0) = 0;
8539 }
8540 }
8541 }
ba325eba
ILT
8542
8543 /* We must look at earlier registers, in case REGNO is part of a
8544 multi word value but is not the first register. If an earlier
8545 register has a value in a mode which overlaps REGNO, then we must
8546 invalidate that earlier register. Note that we do not need to
8547 check REGNO or later registers (we must not check REGNO itself,
8548 because we would incorrectly conclude that there was a conflict). */
8549
8550 for (i = 0; i < regno; i++)
8551 {
8552 rtx x;
8553
8554 for (x = reg_values[i]; x; x = XEXP (x, 1))
8555 {
8556 if (XEXP (x, 0) != 0)
8557 {
dbd7556e 8558 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
ba325eba
ILT
8559 REGNO (invalidate_regno_rtx) = i;
8560 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8561 NULL_PTR))
8562 {
8563 reload_cse_invalidate_regno (i, VOIDmode, 1);
8564 break;
8565 }
8566 }
8567 }
8568 }
2a9fb548
ILT
8569}
8570
866aa3b6
DE
8571/* The memory at address MEM_BASE is being changed.
8572 Return whether this change will invalidate VAL. */
2a9fb548
ILT
8573
8574static int
cbfc3ad3 8575reload_cse_mem_conflict_p (mem_base, val)
2a9fb548 8576 rtx mem_base;
2a9fb548
ILT
8577 rtx val;
8578{
8579 enum rtx_code code;
6f7d635c 8580 const char *fmt;
2a9fb548
ILT
8581 int i;
8582
8583 code = GET_CODE (val);
8584 switch (code)
8585 {
8586 /* Get rid of a few simple cases quickly. */
8587 case REG:
2a9fb548
ILT
8588 case PC:
8589 case CC0:
8590 case SCRATCH:
8591 case CONST:
8592 case CONST_INT:
8593 case CONST_DOUBLE:
8594 case SYMBOL_REF:
8595 case LABEL_REF:
8596 return 0;
8597
8598 case MEM:
866aa3b6
DE
8599 if (GET_MODE (mem_base) == BLKmode
8600 || GET_MODE (val) == BLKmode)
8601 return 1;
e9a25f70
JL
8602 if (anti_dependence (val, mem_base))
8603 return 1;
8604 /* The address may contain nested MEMs. */
8605 break;
2a9fb548
ILT
8606
8607 default:
8608 break;
8609 }
8610
8611 fmt = GET_RTX_FORMAT (code);
8612
8613 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8614 {
8615 if (fmt[i] == 'e')
8616 {
cbfc3ad3 8617 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
2a9fb548
ILT
8618 return 1;
8619 }
8620 else if (fmt[i] == 'E')
8621 {
8622 int j;
8623
8624 for (j = 0; j < XVECLEN (val, i); j++)
cbfc3ad3 8625 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
2a9fb548
ILT
8626 return 1;
8627 }
8628 }
8629
8630 return 0;
8631}
8632
8633/* Invalidate any entries in reg_values which are changed because of a
8634 store to MEM_RTX. If this is called because of a non-const call
8635 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8636
8637static void
8638reload_cse_invalidate_mem (mem_rtx)
8639 rtx mem_rtx;
8640{
8641 register int i;
2a9fb548
ILT
8642
8643 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8644 {
8645 rtx x;
8646
8647 for (x = reg_values[i]; x; x = XEXP (x, 1))
8648 {
8649 if (XEXP (x, 0) != 0
cbfc3ad3 8650 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
2a9fb548
ILT
8651 {
8652 /* If this is the only entry on the list, clear
05d10675
BS
8653 reg_values[i]. Otherwise, just clear this entry on
8654 the list. */
2a9fb548
ILT
8655 if (XEXP (x, 1) == 0 && x == reg_values[i])
8656 {
8657 reg_values[i] = 0;
8658 break;
8659 }
8660 XEXP (x, 0) = 0;
8661 }
8662 }
8663 }
8664}
8665
8666/* Invalidate DEST, which is being assigned to or clobbered. The
8667 second parameter exists so that this function can be passed to
8668 note_stores; it is ignored. */
8669
8670static void
8671reload_cse_invalidate_rtx (dest, ignore)
8672 rtx dest;
487a6e06 8673 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
8674{
8675 while (GET_CODE (dest) == STRICT_LOW_PART
8676 || GET_CODE (dest) == SIGN_EXTRACT
8677 || GET_CODE (dest) == ZERO_EXTRACT
8678 || GET_CODE (dest) == SUBREG)
8679 dest = XEXP (dest, 0);
8680
8681 if (GET_CODE (dest) == REG)
8682 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8683 else if (GET_CODE (dest) == MEM)
8684 reload_cse_invalidate_mem (dest);
8685}
8686
8687/* Do a very simple CSE pass over the hard registers.
8688
8689 This function detects no-op moves where we happened to assign two
8690 different pseudo-registers to the same hard register, and then
8691 copied one to the other. Reload will generate a useless
8692 instruction copying a register to itself.
8693
8694 This function also detects cases where we load a value from memory
8695 into two different registers, and (if memory is more expensive than
8696 registers) changes it to simply copy the first register into the
05d10675 8697 second register.
e9a25f70
JL
8698
8699 Another optimization is performed that scans the operands of each
8700 instruction to see whether the value is already available in a
8701 hard register. It then replaces the operand with the hard register
8702 if possible, much like an optional reload would. */
2a9fb548 8703
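/* Two typical wins (register numbers invented): reload can leave behind

	(set (reg:SI 2) (reg:SI 2))

   when two pseudos shared hard reg 2, and such a no-op set is deleted (or
   turned into a USE if it sets the function value); and if

	(set (reg:SI 1) (mem:SI (reg:SI 6)))
	(set (reg:SI 2) (mem:SI (reg:SI 6)))

   both survive with the memory unchanged in between, the second load can
   become (set (reg:SI 2) (reg:SI 1)) when the copy is cheaper.  */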
5adf6da0
R
8704static void
8705reload_cse_regs_1 (first)
2a9fb548
ILT
8706 rtx first;
8707{
8708 char *firstobj;
8709 rtx callmem;
8710 register int i;
8711 rtx insn;
8712
cbfc3ad3
RK
8713 init_alias_analysis ();
8714
2a9fb548 8715 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
e016950d 8716 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
2a9fb548
ILT
8717
8718 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8719 free them when we are done. */
8720 push_obstacks (&reload_obstack, &reload_obstack);
8721 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8722
8723 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8724 memory for a non-const call instruction. */
38a448ca 8725 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
2a9fb548 8726
ba325eba
ILT
8727 /* This is used in reload_cse_invalidate_regno to avoid consing a
8728 new REG in a loop in that function. */
38a448ca 8729 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
ba325eba 8730
2a9fb548
ILT
8731 for (insn = first; insn; insn = NEXT_INSN (insn))
8732 {
8733 rtx body;
8734
8735 if (GET_CODE (insn) == CODE_LABEL)
8736 {
8737 /* Forget all the register values at a code label. We don't
05d10675 8738 try to do anything clever around jumps. */
2a9fb548
ILT
8739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8740 reg_values[i] = 0;
8741
8742 continue;
8743 }
8744
05d10675 8745#ifdef NON_SAVING_SETJMP
2a9fb548
ILT
8746 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8747 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8748 {
8749 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8750 reg_values[i] = 0;
8751
8752 continue;
8753 }
8754#endif
8755
8756 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8757 continue;
8758
8759 /* If this is a call instruction, forget anything stored in a
8760 call clobbered register, or, if this is not a const call, in
8761 memory. */
8762 if (GET_CODE (insn) == CALL_INSN)
8763 {
8764 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8765 if (call_used_regs[i])
8766 reload_cse_invalidate_regno (i, VOIDmode, 1);
8767
8768 if (! CONST_CALL_P (insn))
8769 reload_cse_invalidate_mem (callmem);
8770 }
8771
05d10675 8772
01e752d3
JL
8773 /* Forget all the register values at a volatile asm. */
8774 if (GET_CODE (insn) == INSN
8775 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
8776 && MEM_VOLATILE_P (PATTERN (insn)))
8777 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8778 reg_values[i] = 0;
8779
2a9fb548
ILT
8780 body = PATTERN (insn);
8781 if (GET_CODE (body) == SET)
8782 {
e9a25f70 8783 int count = 0;
31418d35 8784 if (reload_cse_noop_set_p (body, insn))
2a9fb548 8785 {
54e89d25
R
8786 /* If this sets the return value of the function, we must keep
8787 a USE around, in case this is in a different basic block
8788		 than the final USE.  Otherwise, we could lose important
8789		 register liveness information on SMALL_REGISTER_CLASSES
8790 machines, where return registers might be used as spills:
8791 subsequent passes assume that spill registers are dead at
8792 the end of a basic block. */
8793 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8794 {
8795 pop_obstacks ();
8796 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8797 INSN_CODE (insn) = -1;
8798 REG_NOTES (insn) = NULL_RTX;
8799 push_obstacks (&reload_obstack, &reload_obstack);
8800 }
8801 else
8802 {
8803 PUT_CODE (insn, NOTE);
8804 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8805 NOTE_SOURCE_FILE (insn) = 0;
8806 }
2a9fb548
ILT
8807
8808 /* We're done with this insn. */
8809 continue;
8810 }
8811
e9a25f70 8812 /* It's not a no-op, but we can try to simplify it. */
e9a25f70
JL
8813 count += reload_cse_simplify_set (body, insn);
8814
6764d250
BS
8815 if (count > 0)
8816 apply_change_group ();
121315ea 8817 else
6764d250 8818 reload_cse_simplify_operands (insn);
05d10675 8819
2a9fb548
ILT
8820 reload_cse_record_set (body, body);
8821 }
8822 else if (GET_CODE (body) == PARALLEL)
8823 {
e9a25f70 8824 int count = 0;
54e89d25 8825 rtx value = NULL_RTX;
2a9fb548
ILT
8826
8827 /* If every action in a PARALLEL is a noop, we can delete
05d10675 8828 the entire PARALLEL. */
2a9fb548 8829 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
54e89d25
R
8830 {
8831 rtx part = XVECEXP (body, 0, i);
8832 if (GET_CODE (part) == SET)
8833 {
8834 if (! reload_cse_noop_set_p (part, insn))
8835 break;
8836 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8837 {
8838 if (value)
8839 break;
8840 value = SET_DEST (part);
8841 }
8842 }
8843 else if (GET_CODE (part) != CLOBBER)
8844 break;
8845 }
2a9fb548
ILT
8846 if (i < 0)
8847 {
54e89d25
R
8848 if (value)
8849 {
8850 pop_obstacks ();
8851 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8852 INSN_CODE (insn) = -1;
8853 REG_NOTES (insn) = NULL_RTX;
8854 push_obstacks (&reload_obstack, &reload_obstack);
8855 }
8856 else
8857 {
8858 PUT_CODE (insn, NOTE);
8859 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8860 NOTE_SOURCE_FILE (insn) = 0;
8861 }
2a9fb548
ILT
8862
8863 /* We're done with this insn. */
8864 continue;
8865 }
05d10675 8866
e9a25f70 8867 /* It's not a no-op, but we can try to simplify it. */
e9a25f70
JL
8868 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8869 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8870 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8871
6764d250
BS
8872 if (count > 0)
8873 apply_change_group ();
121315ea 8874 else
6764d250 8875 reload_cse_simplify_operands (insn);
2a9fb548
ILT
8876
8877 /* Look through the PARALLEL and record the values being
05d10675 8878 set, if possible. Also handle any CLOBBERs. */
2a9fb548
ILT
8879 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8880 {
8881 rtx x = XVECEXP (body, 0, i);
8882
8883 if (GET_CODE (x) == SET)
8884 reload_cse_record_set (x, body);
8885 else
8886 note_stores (x, reload_cse_invalidate_rtx);
8887 }
8888 }
8889 else
8890 note_stores (body, reload_cse_invalidate_rtx);
8891
8892#ifdef AUTO_INC_DEC
8893 /* Clobber any registers which appear in REG_INC notes. We
05d10675
BS
8894 could keep track of the changes to their values, but it is
8895 unlikely to help. */
2a9fb548
ILT
8896 {
8897 rtx x;
8898
8899 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8900 if (REG_NOTE_KIND (x) == REG_INC)
8901 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8902 }
8903#endif
8904
8905 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
05d10675 8906 after we have processed the insn. */
2a9fb548
ILT
8907 if (GET_CODE (insn) == CALL_INSN)
8908 {
8909 rtx x;
8910
8911 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8912 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8913 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8914 }
8915 }
8916
8917 /* Free all the temporary structures we created, and go back to the
8918 regular obstacks. */
8919 obstack_free (&reload_obstack, firstobj);
8920 pop_obstacks ();
8921}
8922
5adf6da0
R
8923/* Call cse / combine like post-reload optimization phases.
8924 FIRST is the first instruction. */
8925void
8926reload_cse_regs (first)
8927 rtx first;
8928{
8929 reload_cse_regs_1 (first);
8930 reload_combine ();
8931 reload_cse_move2add (first);
8932 if (flag_expensive_optimizations)
8933 reload_cse_regs_1 (first);
8934}
8935
2a9fb548
ILT
8936/* Return whether the values known for REGNO are equal to VAL. MODE
8937 is the mode of the object that VAL is being copied to; this matters
8938 if VAL is a CONST_INT. */
8939
8940static int
8941reload_cse_regno_equal_p (regno, val, mode)
8942 int regno;
8943 rtx val;
8944 enum machine_mode mode;
8945{
8946 rtx x;
8947
8948 if (val == 0)
8949 return 0;
8950
8951 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8952 if (XEXP (x, 0) != 0
8953 && rtx_equal_p (XEXP (x, 0), val)
bb173ade
RK
8954 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8955 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2a9fb548
ILT
8956 && (GET_CODE (val) != CONST_INT
8957 || mode == GET_MODE (x)
8958 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
6e848450
RK
8959 /* On a big endian machine if the value spans more than
8960 one register then this register holds the high part of
8961 it and we can't use it.
8962
8963 ??? We should also compare with the high part of the
8964 value. */
8965 && !(WORDS_BIG_ENDIAN
8966 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
2a9fb548
ILT
8967 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8968 GET_MODE_BITSIZE (GET_MODE (x))))))
8969 return 1;
8970
8971 return 0;
8972}
8973
31418d35
ILT
8974/* See whether a single set is a noop. SET is the set instruction we
8975   should check, and INSN is the instruction from which it came.  */
2a9fb548
ILT
8976
8977static int
31418d35 8978reload_cse_noop_set_p (set, insn)
2a9fb548 8979 rtx set;
6a651371 8980 rtx insn ATTRIBUTE_UNUSED;
2a9fb548
ILT
8981{
8982 rtx src, dest;
8983 enum machine_mode dest_mode;
8984 int dreg, sreg;
31418d35 8985 int ret;
2a9fb548
ILT
8986
8987 src = SET_SRC (set);
8988 dest = SET_DEST (set);
8989 dest_mode = GET_MODE (dest);
8990
8991 if (side_effects_p (src))
8992 return 0;
8993
8994 dreg = true_regnum (dest);
8995 sreg = true_regnum (src);
8996
31418d35
ILT
8997 /* Check for setting a register to itself. In this case, we don't
8998 have to worry about REG_DEAD notes. */
8999 if (dreg >= 0 && dreg == sreg)
9000 return 1;
9001
9002 ret = 0;
2a9fb548
ILT
9003 if (dreg >= 0)
9004 {
9005 /* Check for setting a register to itself. */
9006 if (dreg == sreg)
31418d35 9007 ret = 1;
2a9fb548
ILT
9008
9009 /* Check for setting a register to a value which we already know
05d10675 9010 is in the register. */
31418d35
ILT
9011 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
9012 ret = 1;
2a9fb548
ILT
9013
9014 /* Check for setting a register DREG to another register SREG
05d10675 9015 where SREG is equal to a value which is already in DREG. */
31418d35 9016 else if (sreg >= 0)
2a9fb548
ILT
9017 {
9018 rtx x;
9019
9020 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
31418d35 9021 {
99c2b71f
ILT
9022 rtx tmp;
9023
9024 if (XEXP (x, 0) == 0)
9025 continue;
9026
9027 if (dest_mode == GET_MODE (x))
9028 tmp = XEXP (x, 0);
9029 else if (GET_MODE_BITSIZE (dest_mode)
9030 < GET_MODE_BITSIZE (GET_MODE (x)))
9031 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9032 else
9033 continue;
9034
9035 if (tmp
9036 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
31418d35
ILT
9037 {
9038 ret = 1;
9039 break;
9040 }
9041 }
2a9fb548
ILT
9042 }
9043 }
9044 else if (GET_CODE (dest) == MEM)
9045 {
9046 /* Check for storing a register to memory when we know that the
05d10675 9047 register is equivalent to the memory location. */
2a9fb548
ILT
9048 if (sreg >= 0
9049 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
9050 && ! side_effects_p (dest))
31418d35 9051 ret = 1;
2a9fb548
ILT
9052 }
9053
31418d35 9054 return ret;
2a9fb548
ILT
9055}
9056
9057/* Try to simplify a single SET instruction. SET is the set pattern.
e9a25f70
JL
9058 INSN is the instruction it came from.
9059 This function only handles one case: if we set a register to a value
9060 which is not a register, we try to find that value in some other register
9061 and change the set into a register copy. */
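/* Example (register numbers invented): if hard reg 3 is known to hold the
   same value as (mem:SI (reg:SI 6)), then

	(set (reg:SI 1) (mem:SI (reg:SI 6)))

   becomes (set (reg:SI 1) (reg:SI 3)), unless the cost checks below decide
   the original load or constant was already at least as cheap.  */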
2a9fb548 9062
e9a25f70 9063static int
2a9fb548
ILT
9064reload_cse_simplify_set (set, insn)
9065 rtx set;
9066 rtx insn;
9067{
9068 int dreg;
9069 rtx src;
9070 enum machine_mode dest_mode;
9071 enum reg_class dclass;
9072 register int i;
9073
2a9fb548
ILT
9074 dreg = true_regnum (SET_DEST (set));
9075 if (dreg < 0)
e9a25f70 9076 return 0;
2a9fb548
ILT
9077
9078 src = SET_SRC (set);
9079 if (side_effects_p (src) || true_regnum (src) >= 0)
e9a25f70 9080 return 0;
2a9fb548 9081
cbd5b9a2
KR
9082 dclass = REGNO_REG_CLASS (dreg);
9083
33ab8de0 9084 /* If memory loads are cheaper than register copies, don't change them. */
cbd5b9a2
KR
9085 if (GET_CODE (src) == MEM
9086 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
e9a25f70 9087 return 0;
2a9fb548 9088
0254c561
JC
9089 /* If the constant is cheaper than a register, don't change it. */
9090 if (CONSTANT_P (src)
9091 && rtx_cost (src, SET) < 2)
9092 return 0;
9093
2a9fb548 9094 dest_mode = GET_MODE (SET_DEST (set));
2a9fb548
ILT
9095 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9096 {
9097 if (i != dreg
9098 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
9099 && reload_cse_regno_equal_p (i, src, dest_mode))
9100 {
9101 int validated;
9102
9103 /* Pop back to the real obstacks while changing the insn. */
9104 pop_obstacks ();
9105
9106 validated = validate_change (insn, &SET_SRC (set),
38a448ca 9107 gen_rtx_REG (dest_mode, i), 1);
2a9fb548
ILT
9108
9109 /* Go back to the obstack we are using for temporary
05d10675 9110 storage. */
2a9fb548
ILT
9111 push_obstacks (&reload_obstack, &reload_obstack);
9112
6764d250
BS
9113 if (validated)
9114 return 1;
e9a25f70
JL
9115 }
9116 }
9117 return 0;
9118}
9119
9120/* Try to replace operands in INSN with equivalent values that are already
05d10675
BS
9121 in registers. This can be viewed as optional reloading.
9122
e9a25f70
JL
9123 For each non-register operand in the insn, see if any hard regs are
9124 known to be equivalent to that operand. Record the alternatives which
9125 can accept these hard registers. Among all alternatives, select the
9126 ones which are better or equal to the one currently matching, where
9127 "better" is in terms of '?' and '!' constraints. Among the remaining
9128 alternatives, select the one which replaces most operands with
9129 hard registers. */
9130
9131static int
9132reload_cse_simplify_operands (insn)
9133 rtx insn;
9134{
9135#ifdef REGISTER_CONSTRAINTS
e9a25f70
JL
9136 int i,j;
9137
9b3142b3 9138 const char *constraints[MAX_RECOG_OPERANDS];
05d10675 9139
e9a25f70
JL
9140 /* Vector recording how bad an alternative is. */
9141 int *alternative_reject;
9142 /* Vector recording how many registers can be introduced by choosing
9143 this alternative. */
9144 int *alternative_nregs;
9145 /* Array of vectors recording, for each operand and each alternative,
9146 which hard register to substitute, or -1 if the operand should be
9147 left as it is. */
9148 int *op_alt_regno[MAX_RECOG_OPERANDS];
9149 /* Array of alternatives, sorted in order of decreasing desirability. */
9150 int *alternative_order;
0254c561 9151 rtx reg = gen_rtx_REG (VOIDmode, -1);
05d10675 9152
0eadeb15 9153 extract_insn (insn);
e9a25f70 9154
0eadeb15 9155 if (recog_n_alternatives == 0 || recog_n_operands == 0)
1d300e19 9156 return 0;
e9a25f70
JL
9157
9158 /* Figure out which alternative currently matches. */
0eadeb15 9159 if (! constrain_operands (1))
b8705408 9160 fatal_insn_not_found (insn);
e9a25f70 9161
0eadeb15
BS
9162 alternative_reject = (int *) alloca (recog_n_alternatives * sizeof (int));
9163 alternative_nregs = (int *) alloca (recog_n_alternatives * sizeof (int));
9164 alternative_order = (int *) alloca (recog_n_alternatives * sizeof (int));
9165 bzero ((char *)alternative_reject, recog_n_alternatives * sizeof (int));
9166 bzero ((char *)alternative_nregs, recog_n_alternatives * sizeof (int));
e9a25f70 9167
0eadeb15 9168 for (i = 0; i < recog_n_operands; i++)
e9a25f70
JL
9169 {
9170 enum machine_mode mode;
9171 int regno;
9b3142b3 9172 const char *p;
e9a25f70 9173
0eadeb15
BS
9174 op_alt_regno[i] = (int *) alloca (recog_n_alternatives * sizeof (int));
9175 for (j = 0; j < recog_n_alternatives; j++)
e9a25f70
JL
9176 op_alt_regno[i][j] = -1;
9177
0eadeb15
BS
9178 p = constraints[i] = recog_constraints[i];
9179 mode = recog_operand_mode[i];
e9a25f70
JL
9180
9181 /* Add the reject values for each alternative given by the constraints
9182 for this operand. */
9183 j = 0;
9184 while (*p != '\0')
9185 {
9186 char c = *p++;
9187 if (c == ',')
9188 j++;
9189 else if (c == '?')
9190 alternative_reject[j] += 3;
9191 else if (c == '!')
9192 alternative_reject[j] += 300;
9193 }
9194
9195 /* We won't change operands which are already registers. We
9196 also don't want to modify output operands. */
9197 regno = true_regnum (recog_operand[i]);
9198 if (regno >= 0
9199 || constraints[i][0] == '='
9200 || constraints[i][0] == '+')
9201 continue;
9202
9203 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9204 {
9205 int class = (int) NO_REGS;
9206
9207 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
9208 continue;
9209
0254c561
JC
9210 REGNO (reg) = regno;
9211 PUT_MODE (reg, mode);
9212
e9a25f70
JL
9213 /* We found a register equal to this operand. Now look for all
9214 alternatives that can accept this register and have not been
9215 assigned a register they can use yet. */
9216 j = 0;
9217 p = constraints[i];
9218 for (;;)
31418d35 9219 {
e9a25f70 9220 char c = *p++;
05d10675 9221
e9a25f70 9222 switch (c)
31418d35 9223 {
e9a25f70
JL
9224 case '=': case '+': case '?':
9225 case '#': case '&': case '!':
05d10675 9226 case '*': case '%':
e9a25f70 9227 case '0': case '1': case '2': case '3': case '4':
c5c76735 9228 case '5': case '6': case '7': case '8': case '9':
e9a25f70
JL
9229 case 'm': case '<': case '>': case 'V': case 'o':
9230 case 'E': case 'F': case 'G': case 'H':
9231 case 's': case 'i': case 'n':
9232 case 'I': case 'J': case 'K': case 'L':
9233 case 'M': case 'N': case 'O': case 'P':
9234#ifdef EXTRA_CONSTRAINT
9235 case 'Q': case 'R': case 'S': case 'T': case 'U':
9236#endif
9237 case 'p': case 'X':
9238 /* These don't say anything we care about. */
9239 break;
9240
9241 case 'g': case 'r':
9242 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9243 break;
9244
9245 default:
9246 class
e51712db 9247 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
e9a25f70 9248 break;
31418d35 9249
e9a25f70
JL
9250 case ',': case '\0':
9251 /* See if REGNO fits this alternative, and set it up as the
9252 replacement register if we don't have one for this
0254c561
JC
9253 alternative yet and the operand being replaced is not
9254 a cheap CONST_INT. */
e9a25f70 9255 if (op_alt_regno[i][j] == -1
0254c561
JC
9256 && reg_fits_class_p (reg, class, 0, mode)
9257 && (GET_CODE (recog_operand[i]) != CONST_INT
9258 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
31418d35 9259 {
e9a25f70
JL
9260 alternative_nregs[j]++;
9261 op_alt_regno[i][j] = regno;
31418d35 9262 }
e9a25f70
JL
9263 j++;
9264 break;
31418d35
ILT
9265 }
9266
e9a25f70
JL
9267 if (c == '\0')
9268 break;
9269 }
9270 }
9271 }
9272
9273 /* Record all alternatives which are better or equal to the currently
9274 matching one in the alternative_order array. */
0eadeb15 9275 for (i = j = 0; i < recog_n_alternatives; i++)
e9a25f70
JL
9276 if (alternative_reject[i] <= alternative_reject[which_alternative])
9277 alternative_order[j++] = i;
0eadeb15 9278 recog_n_alternatives = j;
e9a25f70
JL
9279
9280 /* Sort it. Given a small number of alternatives, a dumb algorithm
9281 won't hurt too much. */
0eadeb15 9282 for (i = 0; i < recog_n_alternatives - 1; i++)
e9a25f70
JL
9283 {
9284 int best = i;
9285 int best_reject = alternative_reject[alternative_order[i]];
9286 int best_nregs = alternative_nregs[alternative_order[i]];
9287 int tmp;
9288
0eadeb15 9289 for (j = i + 1; j < recog_n_alternatives; j++)
e9a25f70
JL
9290 {
9291 int this_reject = alternative_reject[alternative_order[j]];
9292 int this_nregs = alternative_nregs[alternative_order[j]];
9293
9294 if (this_reject < best_reject
9295 || (this_reject == best_reject && this_nregs < best_nregs))
9296 {
9297 best = j;
9298 best_reject = this_reject;
9299 best_nregs = this_nregs;
31418d35 9300 }
2a9fb548 9301 }
05d10675 9302
e9a25f70
JL
9303 tmp = alternative_order[best];
9304 alternative_order[best] = alternative_order[i];
9305 alternative_order[i] = tmp;
9306 }
05d10675 9307
e9a25f70
JL
9308 /* Substitute the operands as determined by op_alt_regno for the best
9309 alternative. */
9310 j = alternative_order[0];
e9a25f70
JL
9311
9312 /* Pop back to the real obstacks while changing the insn. */
9313 pop_obstacks ();
9314
0eadeb15 9315 for (i = 0; i < recog_n_operands; i++)
e9a25f70 9316 {
0eadeb15 9317 enum machine_mode mode = recog_operand_mode[i];
e9a25f70
JL
9318 if (op_alt_regno[i][j] == -1)
9319 continue;
9320
e9a25f70 9321 validate_change (insn, recog_operand_loc[i],
38a448ca 9322 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
e9a25f70
JL
9323 }
9324
0eadeb15 9325 for (i = recog_n_dups - 1; i >= 0; i--)
e9a25f70
JL
9326 {
9327 int op = recog_dup_num[i];
0eadeb15 9328 enum machine_mode mode = recog_operand_mode[op];
e9a25f70
JL
9329
9330 if (op_alt_regno[op][j] == -1)
9331 continue;
9332
e9a25f70 9333 validate_change (insn, recog_dup_loc[i],
38a448ca 9334 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
2a9fb548 9335 }
e9a25f70
JL
9336
9337 /* Go back to the obstack we are using for temporary
9338 storage. */
9339 push_obstacks (&reload_obstack, &reload_obstack);
9340
9341 return apply_change_group ();
9342#else
9343 return 0;
9344#endif
2a9fb548
ILT
9345}
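/* A minimal standalone sketch of how the reject weights above are read off
   a constraint string: ',' moves to the next alternative, '?' adds 3 and
   '!' adds 300.  The constraint text in main() is a made-up example, not
   taken from any machine description.  */
#include <stdio.h>
#include <string.h>

static void
constraint_rejects (const char *p, int *reject, int n_alts)
{
  int j = 0;

  memset (reject, 0, n_alts * sizeof (int));
  while (*p != '\0' && j < n_alts)
    {
      char c = *p++;
      if (c == ',')
        j++;
      else if (c == '?')
        reject[j] += 3;
      else if (c == '!')
        reject[j] += 300;
    }
}

int
main (void)
{
  int reject[3], i;

  constraint_rejects ("r,?m,!rm", reject, 3);
  for (i = 0; i < 3; i++)
    printf ("alternative %d: reject %d\n", i, reject[i]);
  /* Prints 0, 3 and 300; the smallest value wins the sort that follows.  */
  return 0;
}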
9346
9347/* These two variables are used to pass information from
9348 reload_cse_record_set to reload_cse_check_clobber. */
9349
9350static int reload_cse_check_clobbered;
9351static rtx reload_cse_check_src;
9352
9353/* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9354 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9355 second argument, which is passed by note_stores, is ignored. */
9356
9357static void
9358reload_cse_check_clobber (dest, ignore)
9359 rtx dest;
487a6e06 9360 rtx ignore ATTRIBUTE_UNUSED;
2a9fb548
ILT
9361{
9362 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9363 reload_cse_check_clobbered = 1;
9364}
9365
9366/* Record the result of a SET instruction. SET is the set pattern.
9367 BODY is the pattern of the insn that it came from. */
9368
9369static void
9370reload_cse_record_set (set, body)
9371 rtx set;
9372 rtx body;
9373{
9e148ceb 9374 rtx dest, src, x;
2a9fb548
ILT
9375 int dreg, sreg;
9376 enum machine_mode dest_mode;
9377
9378 dest = SET_DEST (set);
9379 src = SET_SRC (set);
9380 dreg = true_regnum (dest);
9381 sreg = true_regnum (src);
9382 dest_mode = GET_MODE (dest);
9383
9e148ceb
ILT
9384 /* Some machines don't define AUTO_INC_DEC, but they still use push
9385 instructions. We need to catch that case here in order to
9386 invalidate the stack pointer correctly. Note that invalidating
9387 the stack pointer is different from invalidating DEST. */
9388 x = dest;
9389 while (GET_CODE (x) == SUBREG
9390 || GET_CODE (x) == ZERO_EXTRACT
9391 || GET_CODE (x) == SIGN_EXTRACT
9392 || GET_CODE (x) == STRICT_LOW_PART)
9393 x = XEXP (x, 0);
9394 if (push_operand (x, GET_MODE (x)))
9395 {
9396 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
9397 reload_cse_invalidate_rtx (dest, NULL_RTX);
9398 return;
9399 }
9400
2a9fb548
ILT
9401 /* We can only handle an assignment to a register, or a store of a
9402 register to a memory location. For other cases, we just clobber
9403 the destination. We also have to just clobber if there are side
9404 effects in SRC or DEST. */
9405 if ((dreg < 0 && GET_CODE (dest) != MEM)
9406 || side_effects_p (src)
9407 || side_effects_p (dest))
9408 {
9409 reload_cse_invalidate_rtx (dest, NULL_RTX);
9410 return;
9411 }
9412
9413#ifdef HAVE_cc0
9414 /* We don't try to handle values involving CC, because it's a pain
9415 to keep track of when they have to be invalidated. */
9416 if (reg_mentioned_p (cc0_rtx, src)
9417 || reg_mentioned_p (cc0_rtx, dest))
9418 {
9419 reload_cse_invalidate_rtx (dest, NULL_RTX);
9420 return;
9421 }
9422#endif
9423
9424 /* If BODY is a PARALLEL, then we need to see whether the source of
9425 SET is clobbered by some other instruction in the PARALLEL. */
9426 if (GET_CODE (body) == PARALLEL)
9427 {
9428 int i;
9429
9430 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9431 {
9432 rtx x;
9433
9434 x = XVECEXP (body, 0, i);
9435 if (x == set)
9436 continue;
9437
9438 reload_cse_check_clobbered = 0;
9439 reload_cse_check_src = src;
9440 note_stores (x, reload_cse_check_clobber);
9441 if (reload_cse_check_clobbered)
9442 {
9443 reload_cse_invalidate_rtx (dest, NULL_RTX);
9444 return;
9445 }
9446 }
9447 }
9448
9449 if (dreg >= 0)
9450 {
9451 int i;
9452
9453 /* This is an assignment to a register. Update the value we
05d10675 9454 have stored for the register. */
2a9fb548 9455 if (sreg >= 0)
ad578014
ILT
9456 {
9457 rtx x;
9458
9459 /* This is a copy from one register to another. Any values
9460 which were valid for SREG are now valid for DREG. If the
9461 mode changes, we use gen_lowpart_common to extract only
9462 the part of the value that is copied. */
9463 reg_values[dreg] = 0;
9464 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9465 {
9466 rtx tmp;
9467
9468 if (XEXP (x, 0) == 0)
9469 continue;
9470 if (dest_mode == GET_MODE (XEXP (x, 0)))
9471 tmp = XEXP (x, 0);
23e7786b 9472 else if (GET_MODE_BITSIZE (dest_mode)
05d10675 9473 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
23e7786b 9474 continue;
ad578014
ILT
9475 else
9476 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9477 if (tmp)
38a448ca
RH
9478 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9479 reg_values[dreg]);
05d10675 9480 }
ad578014 9481 }
2a9fb548 9482 else
38a448ca 9483 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
2a9fb548
ILT
9484
9485 /* We've changed DREG, so invalidate any values held by other
05d10675 9486 registers that depend upon it. */
2a9fb548
ILT
9487 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9488
9489 /* If this assignment changes more than one hard register,
05d10675 9490 forget anything we know about the others. */
2a9fb548
ILT
9491 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9492 reg_values[dreg + i] = 0;
9493 }
9494 else if (GET_CODE (dest) == MEM)
9495 {
9496 /* Invalidate conflicting memory locations. */
9497 reload_cse_invalidate_mem (dest);
9498
9499 /* If we're storing a register to memory, add DEST to the list
05d10675 9500 in REG_VALUES. */
2a9fb548 9501 if (sreg >= 0 && ! side_effects_p (dest))
38a448ca 9502 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
2a9fb548
ILT
9503 reg_values[sreg]);
9504 }
9505 else
9506 {
9507 /* We should have bailed out earlier. */
9508 abort ();
9509 }
9510}
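/* A minimal standalone sketch of the "extract only the part of the value
   that is copied" step for plain integers: a narrower destination mode keeps
   just the low-order bits of the wider value.  The 16-bit width in main() is
   an arbitrary example; the real code uses gen_lowpart_common on rtl, not
   host integers.  */
#include <stdio.h>

static long
low_part (long value, int mode_bits)   /* assumes mode_bits < bits in long */
{
  long mask = ((long) 1 << mode_bits) - 1;
  return value & mask;
}

int
main (void)
{
  /* A register known to hold 0x12345678 in a 32-bit mode holds 0x5678
     when viewed in a 16-bit mode.  */
  printf ("0x%lx\n", low_part (0x12345678L, 16));   /* prints 0x5678 */
  return 0;
}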
5adf6da0
R
9511\f
9512/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9513 addressing now.
9514 This code might also be useful when reload gave up on reg+reg addressing
9515 because of clashes between the return register and INDEX_REG_CLASS. */
9516
9517/* The maximum number of uses of a register we can keep track of to
9518 replace them with reg+reg addressing. */
9519#define RELOAD_COMBINE_MAX_USES 6
9520
9521/* INSN is the insn where a register has been used, and USEP points to the
9522 location of the register within the rtl. */
9523struct reg_use { rtx insn, *usep; };
9524
9525/* If the register is used in some unknown fashion, USE_INDEX is negative.
9526 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9527 indicates where it becomes live again.
9528 Otherwise, USE_INDEX is the index of the most recently recorded use of
9529 the register (since we scan backwards, that is the earliest of these uses
9530 in the insn stream), OFFSET contains the constant offset that is added to
9531 the register in all encountered uses, and USE_RUID indicates the first
ed937a19
R
9532 use we encountered, i.e. the last of these uses in the insn stream.
9533 STORE_RUID is always meaningful if we only want to use a value in a
9534 register in a different place: it denotes the next insn in the insn
9535 stream (i.e. the last encountered) that sets or clobbers the register. */
5adf6da0
R
9536static struct
9537 {
9538 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9539 int use_index;
9540 rtx offset;
9541 int store_ruid;
9542 int use_ruid;
9543 } reg_state[FIRST_PSEUDO_REGISTER];
9544
9545/* Reverse linear uid. This is increased in reload_combine while scanning
9546 the instructions from last to first. It is used to set last_label_ruid
9547 and the store_ruid / use_ruid fields in reg_state. */
9548static int reload_combine_ruid;
9549
b0634509
R
9550#define LABEL_LIVE(LABEL) \
9551 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
9552
5adf6da0
R
9553static void
9554reload_combine ()
9555{
9556 rtx insn, set;
9557 int first_index_reg = 1, last_index_reg = 0;
9558 int i;
9559 int last_label_ruid;
b0634509
R
9560 int min_labelno, n_labels;
9561 HARD_REG_SET ever_live_at_start, *label_live;
5adf6da0
R
9562
9563 /* If reg+reg can be used in offsetable memory addresses, the main chunk of
9564 reload has already used it where appropriate, so there is no use in
9565 trying to generate it now. */
03acd8f8 9566 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
5adf6da0
R
9567 return;
9568
9569 /* To avoid wasting too much time later searching for an index register,
9570 determine the minimum and maximum index register numbers. */
9571 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9572 {
9573 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9574 {
9575 if (! last_index_reg)
9576 last_index_reg = i;
9577 first_index_reg = i;
9578 }
9579 }
9580 /* If no index register is available, we can quit now. */
9581 if (first_index_reg > last_index_reg)
9582 return;
9583
b0634509
R
9584 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
9585 information is a bit fuzzy immediately after reload, but it's
9586 still good enough to determine which registers are live at a jump
9587 destination. */
9588 min_labelno = get_first_label_num ();
9589 n_labels = max_label_num () - min_labelno;
9590 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
9591 CLEAR_HARD_REG_SET (ever_live_at_start);
9592 for (i = n_basic_blocks - 1; i >= 0; i--)
9593 {
3b413743 9594 insn = BLOCK_HEAD (i);
b0634509
R
9595 if (GET_CODE (insn) == CODE_LABEL)
9596 {
9597 HARD_REG_SET live;
9598
e881bb1b
RH
9599 REG_SET_TO_HARD_REG_SET (live, BASIC_BLOCK (i)->global_live_at_start);
9600 compute_use_by_pseudos (&live, BASIC_BLOCK (i)->global_live_at_start);
b0634509
R
9601 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
9602 IOR_HARD_REG_SET (ever_live_at_start, live);
9603 }
9604 }
9605
5adf6da0
R
9606 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9607 last_label_ruid = reload_combine_ruid = 0;
9608 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9609 {
ed937a19 9610 reg_state[i].store_ruid = reload_combine_ruid;
5adf6da0
R
9611 if (fixed_regs[i])
9612 reg_state[i].use_index = -1;
9613 else
ed937a19 9614 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
5adf6da0
R
9615 }
9616
9617 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9618 {
9619 rtx note;
9620
9621 /* We cannot do our optimization across labels. Invalidating all the use
9622 information we have would be costly, so we just note where the label
05d10675 9623 is and then later disable any optimization that would cross it. */
5adf6da0
R
9624 if (GET_CODE (insn) == CODE_LABEL)
9625 last_label_ruid = reload_combine_ruid;
b0634509
R
9626 if (GET_CODE (insn) == BARRIER)
9627 {
9628 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9629 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9630 }
5adf6da0
R
9631 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9632 continue;
9633 reload_combine_ruid++;
9634
9635 /* Look for (set (REGX) (CONST_INT))
9636 (set (REGX) (PLUS (REGX) (REGY)))
9637 ...
9638 ... (MEM (REGX)) ...
9639 and convert it to
9640 (set (REGZ) (CONST_INT))
9641 ...
9642 ... (MEM (PLUS (REGZ) (REGY)))... .
9643
9644 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9645 and that we know all uses of REGX before it dies. */
2abbc1bd
R
9646 set = single_set (insn);
9647 if (set != NULL_RTX
5adf6da0
R
9648 && GET_CODE (SET_DEST (set)) == REG
9649 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9650 GET_MODE (SET_DEST (set)))
9651 == 1)
9652 && GET_CODE (SET_SRC (set)) == PLUS
9653 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9654 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9655 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9656 {
9657 rtx reg = SET_DEST (set);
9658 rtx plus = SET_SRC (set);
9659 rtx base = XEXP (plus, 1);
9660 rtx prev = prev_nonnote_insn (insn);
9661 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9662 int regno = REGNO (reg);
6a651371 9663 rtx const_reg = NULL_RTX;
5adf6da0
R
9664 rtx reg_sum = NULL_RTX;
9665
9666 /* Now, we need an index register.
9667 We'll set index_reg to this index register, const_reg to the
9668 register that is to be loaded with the constant
9669 (denoted as REGZ in the substitution illustration above),
9670 and reg_sum to the reg+reg expression that we want to substitute
9671 for uses of REG (typically in MEMs).
9672 First check REG and BASE for being index registers;
9673 we can use them even if they are not dead. */
9674 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9675 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9676 REGNO (base)))
9677 {
9678 const_reg = reg;
9679 reg_sum = plus;
9680 }
9681 else
9682 {
05d10675
BS
9683 /* Otherwise, look for a free index register. Since we have
9684 checked above that neither REG nor BASE is an index register,
9685 if we find anything at all, it will be different from these
9686 two registers. */
9687 for (i = first_index_reg; i <= last_index_reg; i++)
5adf6da0
R
9688 {
9689 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9690 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9691 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9692 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9693 {
9694 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9695 const_reg = index_reg;
9696 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9697 break;
9698 }
9699 }
9700 }
ed937a19
R
9701 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
9702 (REGY), i.e. BASE, is not clobbered before the last use we'll
9703 create. */
5adf6da0
R
9704 if (prev_set
9705 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9706 && rtx_equal_p (SET_DEST (prev_set), reg)
9707 && reg_state[regno].use_index >= 0
ed937a19 9708 && reg_state[REGNO (base)].store_ruid <= reg_state[regno].use_ruid
5adf6da0
R
9709 && reg_sum)
9710 {
9711 int i;
9712
9713 /* Change the destination register and, if necessary, the
9714 constant value in PREV, the constant loading instruction. */
9715 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9716 if (reg_state[regno].offset != const0_rtx)
9717 validate_change (prev,
9718 &SET_SRC (prev_set),
9719 GEN_INT (INTVAL (SET_SRC (prev_set))
9720 + INTVAL (reg_state[regno].offset)),
9721 1);
9722 /* Now for every use of REG that we have recorded, replace REG
9723 with REG_SUM. */
9724 for (i = reg_state[regno].use_index;
9725 i < RELOAD_COMBINE_MAX_USES; i++)
9726 validate_change (reg_state[regno].reg_use[i].insn,
9727 reg_state[regno].reg_use[i].usep,
9728 reg_sum, 1);
9729
9730 if (apply_change_group ())
9731 {
9732 rtx *np;
9733
9734 /* Delete the reg-reg addition. */
9735 PUT_CODE (insn, NOTE);
9736 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9737 NOTE_SOURCE_FILE (insn) = 0;
9738
9739 if (reg_state[regno].offset != const0_rtx)
9740 {
9741 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9742 are now invalid. */
9743 for (np = &REG_NOTES (prev); *np; )
9744 {
9745 if (REG_NOTE_KIND (*np) == REG_EQUAL
9746 || REG_NOTE_KIND (*np) == REG_EQUIV)
9747 *np = XEXP (*np, 1);
9748 else
9749 np = &XEXP (*np, 1);
9750 }
9751 }
9752 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9753 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9754 continue;
9755 }
9756 }
9757 }
9758 note_stores (PATTERN (insn), reload_combine_note_store);
9759 if (GET_CODE (insn) == CALL_INSN)
9760 {
9761 rtx link;
9762
9763 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9764 {
9765 if (call_used_regs[i])
9766 {
9767 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9768 reg_state[i].store_ruid = reload_combine_ruid;
9769 }
9770 }
9771 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9772 link = XEXP (link, 1))
9773 {
9774 rtx use = XEXP (link, 0);
9775 int regno = REGNO (XEXP (use, 0));
9776 if (GET_CODE (use) == CLOBBER)
9777 {
9778 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9779 reg_state[regno].store_ruid = reload_combine_ruid;
9780 }
9781 else
9782 reg_state[regno].use_index = -1;
9783 }
9784 }
b0634509 9785 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) != RETURN)
5adf6da0
R
9786 {
9787 /* Non-spill registers might be used at the jump destination in
9788 some unknown fashion, so we have to mark the unknown use. */
b0634509
R
9789 HARD_REG_SET *live;
9790 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
9791 && JUMP_LABEL (insn))
9792 live = &LABEL_LIVE (JUMP_LABEL (insn));
9793 else
9794 live = &ever_live_at_start;
5adf6da0
R
9795 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9796 {
b0634509 9797 if (TEST_HARD_REG_BIT (*live, i))
5adf6da0
R
9798 reg_state[i].use_index = -1;
9799 }
9800 }
9801 reload_combine_note_use (&PATTERN (insn), insn);
9802 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9803 {
9804 if (REG_NOTE_KIND (note) == REG_INC
9805 && GET_CODE (XEXP (note, 0)) == REG)
ed937a19
R
9806 {
9807 int regno = REGNO (XEXP (note, 0));
9808
9809 reg_state[regno].store_ruid = reload_combine_ruid;
9810 reg_state[regno].use_index = -1;
9811 }
5adf6da0
R
9812 }
9813 }
b0634509 9814 free (label_live);
5adf6da0
R
9815}
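/* A minimal standalone sketch of the constant adjustment performed above:
   if every recorded use of REGX was (plus (REGX) (const_int OFF)) and the
   uses are rewritten to the plain sum (plus (REGZ) (REGY)), then OFF must be
   folded into the constant loaded into REGZ.  The numbers are arbitrary
   examples, not taken from real insns.  */
#include <stdio.h>

int
main (void)
{
  long loaded_const = 100;   /* (set (REGX) (const_int 100)) */
  long use_offset = 4;       /* each use was (plus (REGX) (const_int 4)) */

  /* Load the adjusted constant; the uses then become (plus (REGZ) (REGY)).  */
  printf ("(set (REGZ) (const_int %ld))\n", loaded_const + use_offset);
  return 0;
}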
9816
9817/* Check if DST is a register or a subreg of a register; if it is,
9818 update reg_state[regno].store_ruid and reg_state[regno].use_index
f93233bb 9819 accordingly. Called via note_stores from reload_combine. */
5adf6da0
R
9820static void
9821reload_combine_note_store (dst, set)
f93233bb 9822 rtx dst, set;
5adf6da0
R
9823{
9824 int regno = 0;
9825 int i;
9826 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9827
9828 if (GET_CODE (dst) == SUBREG)
9829 {
9830 regno = SUBREG_WORD (dst);
9831 dst = SUBREG_REG (dst);
9832 }
9833 if (GET_CODE (dst) != REG)
9834 return;
9835 regno += REGNO (dst);
54ca6ffa 9836
5adf6da0 9837 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
05d10675 9838 careful with registers / register parts that are not full words.
54ca6ffa
JL
9839
9840 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
9841 if (GET_CODE (set) != SET
9842 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
9843 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
9844 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
ed937a19 9845 {
43f854cf 9846 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
f93233bb
JL
9847 {
9848 reg_state[i].use_index = -1;
9849 reg_state[i].store_ruid = reload_combine_ruid;
9850 }
ed937a19 9851 }
5adf6da0
R
9852 else
9853 {
43f854cf 9854 for (i = (size - 1) / UNITS_PER_WORD + regno; i >= regno; i--)
5adf6da0
R
9855 {
9856 reg_state[i].store_ruid = reload_combine_ruid;
9857 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9858 }
9859 }
9860}
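/* A minimal standalone sketch of the loop bound used above: a stored value
   of SIZE bytes starting at hard register REGNO covers the word-sized
   registers REGNO through REGNO + (SIZE - 1) / UNITS_PER_WORD, all of which
   must be invalidated.  The 4-byte word size is an arbitrary example.  */
#include <stdio.h>

#define TOY_UNITS_PER_WORD 4

int
main (void)
{
  int regno = 10;
  unsigned size = 8;   /* e.g. a DImode store on a 32-bit target */
  int i;

  for (i = (size - 1) / TOY_UNITS_PER_WORD + regno; i >= regno; i--)
    printf ("invalidate hard reg %d\n", i);   /* regs 11 and 10 */
  return 0;
}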
9861
9862/* XP points to a piece of rtl that has to be checked for any uses of
9863 registers.
9864 *XP is the pattern of INSN, or a part of it.
9865 Called from reload_combine, and recursively by itself. */
9866static void
9867reload_combine_note_use (xp, insn)
9868 rtx *xp, insn;
9869{
9870 rtx x = *xp;
9871 enum rtx_code code = x->code;
6f7d635c 9872 const char *fmt;
5adf6da0
R
9873 int i, j;
9874 rtx offset = const0_rtx; /* For the REG case below. */
9875
9876 switch (code)
9877 {
9878 case SET:
9879 if (GET_CODE (SET_DEST (x)) == REG)
9880 {
9881 reload_combine_note_use (&SET_SRC (x), insn);
9882 return;
9883 }
9884 break;
9885
9886 case CLOBBER:
9887 if (GET_CODE (SET_DEST (x)) == REG)
9888 return;
9889 break;
9890
9891 case PLUS:
9892 /* We are interested in (plus (reg) (const_int)) . */
9893 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9894 break;
9895 offset = XEXP (x, 1);
9896 x = XEXP (x, 0);
05d10675 9897 /* Fall through. */
5adf6da0
R
9898 case REG:
9899 {
9900 int regno = REGNO (x);
9901 int use_index;
9902
9903 /* Some spurious USEs of pseudo registers might remain.
9904 Just ignore them. */
9905 if (regno >= FIRST_PSEUDO_REGISTER)
9906 return;
9907
9908 /* If this register is already used in some unknown fashion, we
9909 can't do anything.
9910 If we decrement the index from zero to -1, we can't store more
9911 uses, so this register becomes used in an unknown fashion. */
9912 use_index = --reg_state[regno].use_index;
9913 if (use_index < 0)
9914 return;
9915
9916 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9917 {
9918 /* We have found another use for a register that is already
9919 used later. Check if the offsets match; if not, mark the
9920 register as used in an unknown fashion. */
9921 if (! rtx_equal_p (offset, reg_state[regno].offset))
9922 {
9923 reg_state[regno].use_index = -1;
9924 return;
9925 }
9926 }
9927 else
9928 {
9929 /* This is the first use of this register we have seen since we
9930 marked it as dead. */
9931 reg_state[regno].offset = offset;
9932 reg_state[regno].use_ruid = reload_combine_ruid;
9933 }
9934 reg_state[regno].reg_use[use_index].insn = insn;
9935 reg_state[regno].reg_use[use_index].usep = xp;
9936 return;
9937 }
9938
9939 default:
9940 break;
9941 }
9942
9943 /* Recursively process the components of X. */
9944 fmt = GET_RTX_FORMAT (code);
9945 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9946 {
9947 if (fmt[i] == 'e')
9948 reload_combine_note_use (&XEXP (x, i), insn);
9949 else if (fmt[i] == 'E')
9950 {
9951 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9952 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9953 }
9954 }
9955}
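/* A minimal standalone sketch of the use_index bookkeeping above: scanning
   backwards, a register starts at RELOAD_COMBINE_MAX_USES ("no uses
   recorded"), each use decrements the index and fills the slot it lands on,
   and once the index drops below zero the register is treated as used in an
   unknown fashion.  The struct and uids here are illustrative only.  */
#include <stdio.h>

#define TOY_MAX_USES 6

struct toy_reg_state
{
  int use_index;                /* next slot to fill, or < 0 if given up */
  int use_insn[TOY_MAX_USES];   /* recorded uses, here just insn uids */
};

static void
toy_note_use (struct toy_reg_state *s, int insn_uid)
{
  int idx = --s->use_index;
  if (idx < 0)                  /* too many uses: no replacement possible */
    return;
  s->use_insn[idx] = insn_uid;
}

int
main (void)
{
  struct toy_reg_state s = { TOY_MAX_USES, { 0 } };
  int uid;

  for (uid = 100; uid > 92; uid--)   /* eight uses: two too many */
    toy_note_use (&s, uid);
  printf ("use_index = %d\n", s.use_index);   /* negative: give up */
  return 0;
}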
9956\f
9957/* See if we can reduce the cost of a constant by replacing a move with
9958 an add. */
9959/* We cannot do our optimization across labels. Invalidating all the
9960 information about register contents we have would be costly, so we
9961 use last_label_luid (local variable of reload_cse_move2add) to note
9962 where the label is and then later disable any optimization that would
9963 cross it.
9964 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9965 reg_set_luid[n] is larger than last_label_luid. */
9966static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9967/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9968 reg_mode[n] to be valid.
9969 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9970 has been set to reg_offset[n] in mode reg_mode[n] .
9971 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9972 register n has been set to the sum of reg_offset[n] and register
9973 reg_base_reg[n], calculated in mode reg_mode[n] . */
9974static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9975static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9976static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9977/* move2add_luid is linearly increased while scanning the instructions
9978 from first to last. It is used to set reg_set_luid in
6764d250 9979 reload_cse_move2add and move2add_note_store. */
5adf6da0
R
9980static int move2add_luid;
9981
ccc4ae07
AS
9982/* Generate a CONST_INT and force it into the range of MODE. */
9983static rtx
9984gen_mode_int (mode, value)
9985 enum machine_mode mode;
9986 HOST_WIDE_INT value;
9987{
9988 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9989 int width = GET_MODE_BITSIZE (mode);
9990
9991 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9992 sign extend it. */
9993 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9994 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9995 cval |= (HOST_WIDE_INT) -1 << width;
9996
9997 return GEN_INT (cval);
9998}
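/* A minimal standalone sketch of the same mask-and-sign-extend arithmetic
   on host integers, using long as a stand-in for HOST_WIDE_INT and an 8-bit
   width as the example mode.  */
#include <stdio.h>

static long
trunc_and_sign_extend (long value, int width)  /* assumes 0 < width < bits in long */
{
  long mask = ((long) 1 << width) - 1;
  long cval = value & mask;

  /* If the truncated value has its sign bit set, propagate that sign into
     the upper bits of the host integer.  */
  if (cval & ((long) 1 << (width - 1)))
    cval |= ~mask;
  return cval;
}

int
main (void)
{
  /* 0x1FF truncated to 8 bits is 0xFF, i.e. -1 as a signed 8-bit value.  */
  printf ("%ld\n", trunc_and_sign_extend (0x1FF, 8));   /* prints -1 */
  return 0;
}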
9999
5adf6da0
R
10000static void
10001reload_cse_move2add (first)
10002 rtx first;
10003{
10004 int i;
10005 rtx insn;
10006 int last_label_luid;
5adf6da0
R
10007
10008 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
6764d250
BS
10009 reg_set_luid[i] = 0;
10010
5adf6da0
R
10011 last_label_luid = 0;
10012 move2add_luid = 1;
10013 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
10014 {
10015 rtx pat, note;
10016
10017 if (GET_CODE (insn) == CODE_LABEL)
10018 last_label_luid = move2add_luid;
10019 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
10020 continue;
10021 pat = PATTERN (insn);
10022 /* For simplicity, we only perform this optimization on
10023 straightforward SETs. */
10024 if (GET_CODE (pat) == SET
10025 && GET_CODE (SET_DEST (pat)) == REG)
10026 {
10027 rtx reg = SET_DEST (pat);
10028 int regno = REGNO (reg);
10029 rtx src = SET_SRC (pat);
10030
10031 /* Check if we have valid information on the contents of this
10032 register in the mode of REG. */
10033 /* ??? We don't know how zero / sign extension is handled, hence
10034 we can't go from a narrower to a wider mode. */
10035 if (reg_set_luid[regno] > last_label_luid
05d10675
BS
10036 && (GET_MODE_SIZE (GET_MODE (reg))
10037 <= GET_MODE_SIZE (reg_mode[regno]))
10038 && GET_CODE (reg_offset[regno]) == CONST_INT)
5adf6da0
R
10039 {
10040 /* Try to transform (set (REGX) (CONST_INT A))
10041 ...
10042 (set (REGX) (CONST_INT B))
10043 to
10044 (set (REGX) (CONST_INT A))
10045 ...
10046 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
10047
10048 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
10049 {
10050 int success = 0;
ccc4ae07
AS
10051 rtx new_src
10052 = gen_mode_int (GET_MODE (reg),
10053 INTVAL (src) - INTVAL (reg_offset[regno]));
5adf6da0
R
10054 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
10055 use (set (reg) (reg)) instead.
10056 We don't delete this insn, nor do we convert it into a
10057 note, to avoid losing register notes or the return
10058 value flag. jump2 already knows how to get rid of
10059 no-op moves. */
10060 if (new_src == const0_rtx)
10061 success = validate_change (insn, &SET_SRC (pat), reg, 0);
10062 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
10063 && have_add2_insn (GET_MODE (reg)))
10064 success = validate_change (insn, &PATTERN (insn),
10065 gen_add2_insn (reg, new_src), 0);
5adf6da0
R
10066 reg_set_luid[regno] = move2add_luid;
10067 reg_mode[regno] = GET_MODE (reg);
10068 reg_offset[regno] = src;
10069 continue;
10070 }
10071
10072 /* Try to transform (set (REGX) (REGY))
10073 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10074 ...
10075 (set (REGX) (REGY))
10076 (set (REGX) (PLUS (REGX) (CONST_INT B)))
10077 to
10078 (set (REGX) (REGY))
10079 (set (REGX) (PLUS (REGX) (CONST_INT A)))
10080 ...
10081 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
10082 else if (GET_CODE (src) == REG
10083 && reg_base_reg[regno] == REGNO (src)
10084 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
10085 {
10086 rtx next = next_nonnote_insn (insn);
6a651371 10087 rtx set = NULL_RTX;
5adf6da0
R
10088 if (next)
10089 set = single_set (next);
10090 if (next
10091 && set
10092 && SET_DEST (set) == reg
10093 && GET_CODE (SET_SRC (set)) == PLUS
10094 && XEXP (SET_SRC (set), 0) == reg
10095 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
10096 {
5adf6da0 10097 rtx src3 = XEXP (SET_SRC (set), 1);
ccc4ae07
AS
10098 rtx new_src
10099 = gen_mode_int (GET_MODE (reg),
10100 INTVAL (src3)
10101 - INTVAL (reg_offset[regno]));
5adf6da0
R
10102 int success = 0;
10103
10104 if (new_src == const0_rtx)
10105 /* See above why we create (set (reg) (reg)) here. */
10106 success
10107 = validate_change (next, &SET_SRC (set), reg, 0);
10108 else if ((rtx_cost (new_src, PLUS)
10109 < 2 + rtx_cost (src3, SET))
10110 && have_add2_insn (GET_MODE (reg)))
10111 success
10112 = validate_change (next, &PATTERN (next),
10113 gen_add2_insn (reg, new_src), 0);
10114 if (success)
10115 {
5adf6da0
R
10116 /* INSN might be the first insn in a basic block
10117 if the preceding insn is a conditional jump
10118 or a possibly-throwing call. */
10119 PUT_CODE (insn, NOTE);
10120 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
10121 NOTE_SOURCE_FILE (insn) = 0;
10122 }
10123 insn = next;
10124 reg_set_luid[regno] = move2add_luid;
10125 reg_mode[regno] = GET_MODE (reg);
10126 reg_offset[regno] = src3;
10127 continue;
10128 }
10129 }
10130 }
10131 }
10132
10133 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
10134 {
10135 if (REG_NOTE_KIND (note) == REG_INC
10136 && GET_CODE (XEXP (note, 0)) == REG)
10137 {
10138 /* Indicate that this register has been recently written to,
10139 but the exact contents are not available. */
10140 int regno = REGNO (XEXP (note, 0));
10141 if (regno < FIRST_PSEUDO_REGISTER)
10142 {
10143 reg_set_luid[regno] = move2add_luid;
10144 reg_offset[regno] = note;
10145 }
10146 }
5adf6da0
R
10147 }
10148 note_stores (PATTERN (insn), move2add_note_store);
10149 /* If this is a CALL_INSN, all call used registers are stored with
10150 unknown values. */
10151 if (GET_CODE (insn) == CALL_INSN)
10152 {
10153 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
10154 {
10155 if (call_used_regs[i])
10156 {
10157 reg_set_luid[i] = move2add_luid;
10158 reg_offset[i] = insn; /* Invalidate contents. */
10159 }
10160 }
10161 }
10162 }
10163}
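/* A minimal standalone sketch of the arithmetic behind the rewrite above:
   a second constant load into the same register can become an add of the
   difference B - A.  Whether the add is really used depends on rtx_cost and
   have_add2_insn; the "small delta is cheap" test below is a made-up
   stand-in for that check.  */
#include <stdio.h>

int
main (void)
{
  long a = 0x12345678;   /* value loaded by the earlier insn */
  long b = 0x12345679;   /* value wanted by the later insn */
  long delta = b - a;

  if (delta == 0)
    /* (plus (reg) (const_int 0)) is not canonical: emit reg = reg.  */
    printf ("replace the second load with: r0 = r0\n");
  else if (delta >= -128 && delta <= 127)   /* hypothetical cost test */
    printf ("replace the second load with: r0 = r0 + %ld\n", delta);
  else
    printf ("keep the full constant load of 0x%lx\n", (unsigned long) b);
  return 0;
}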
10164
10165/* SET is a SET or CLOBBER that sets DST.
10166 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
10167 Called from reload_cse_move2add via note_stores. */
10168static void
10169move2add_note_store (dst, set)
10170 rtx dst, set;
10171{
10172 int regno = 0;
10173 int i;
10174
10175 enum machine_mode mode = GET_MODE (dst);
10176 if (GET_CODE (dst) == SUBREG)
10177 {
10178 regno = SUBREG_WORD (dst);
10179 dst = SUBREG_REG (dst);
10180 }
10181 if (GET_CODE (dst) != REG)
10182 return;
10183
10184 regno += REGNO (dst);
10185
f93233bb
JL
10186 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
10187 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
10188 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
10189 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
5adf6da0
R
10190 {
10191 rtx src = SET_SRC (set);
10192
10193 reg_mode[regno] = mode;
10194 switch (GET_CODE (src))
10195 {
10196 case PLUS:
10197 {
10198 rtx src0 = XEXP (src, 0);
10199 if (GET_CODE (src0) == REG)
10200 {
10201 if (REGNO (src0) != regno
10202 || reg_offset[regno] != const0_rtx)
10203 {
10204 reg_base_reg[regno] = REGNO (src0);
10205 reg_set_luid[regno] = move2add_luid;
10206 }
10207 reg_offset[regno] = XEXP (src, 1);
10208 break;
10209 }
10210 reg_set_luid[regno] = move2add_luid;
10211 reg_offset[regno] = set; /* Invalidate contents. */
10212 break;
10213 }
10214
10215 case REG:
10216 reg_base_reg[regno] = REGNO (SET_SRC (set));
10217 reg_offset[regno] = const0_rtx;
10218 reg_set_luid[regno] = move2add_luid;
10219 break;
10220
10221 default:
10222 reg_base_reg[regno] = -1;
10223 reg_offset[regno] = SET_SRC (set);
10224 reg_set_luid[regno] = move2add_luid;
10225 break;
10226 }
10227 }
10228 else
10229 {
10230 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
10231 {
10232 /* Indicate that this register has been recently written to,
10233 but the exact contents are not available. */
10234 reg_set_luid[i] = move2add_luid;
10235 reg_offset[i] = dst;
10236 }
10237 }
10238}
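/* A minimal standalone sketch of the (base register, offset) bookkeeping
   above: "reg = other_reg" records (base = other_reg, offset = 0), and a
   following "reg = reg + const" keeps that base and records the constant as
   the offset.  Register numbers and values are arbitrary examples.  */
#include <stdio.h>

struct toy_value { int base_reg; long offset; };   /* base_reg < 0: none */

int
main (void)
{
  struct toy_value val[4];

  /* r3 = r2 */
  val[3].base_reg = 2;
  val[3].offset = 0;

  /* r3 = r3 + 8: the base register is unchanged, the offset becomes 8.  */
  if (val[3].offset == 0)
    val[3].offset = 8;

  printf ("r3 is known to be r%d + %ld\n", val[3].base_reg, val[3].offset);
  return 0;
}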
2dfa9a87
MH
10239
10240#ifdef AUTO_INC_DEC
10241static void
10242add_auto_inc_notes (insn, x)
10243 rtx insn;
10244 rtx x;
10245{
10246 enum rtx_code code = GET_CODE (x);
6f7d635c 10247 const char *fmt;
2dfa9a87
MH
10248 int i, j;
10249
10250 if (code == MEM && auto_inc_p (XEXP (x, 0)))
10251 {
10252 REG_NOTES (insn)
10253 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
10254 return;
10255 }
10256
10257 /* Scan all the operand sub-expressions. */
10258 fmt = GET_RTX_FORMAT (code);
10259 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10260 {
10261 if (fmt[i] == 'e')
10262 add_auto_inc_notes (insn, XEXP (x, i));
10263 else if (fmt[i] == 'E')
10264 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10265 add_auto_inc_notes (insn, XVECEXP (x, i, j));
10266 }
10267}
10268#endif