/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include <stdio.h>
#include "config.h"
#include "rtl.h"
#include "obstack.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "flags.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "reload.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "real.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   All the pseudos that were formerly allocated to the hard regs that
   are now in use as reload regs must be ``spilled''.  This means
   that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, creating additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
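
/* For example, suppose a pseudo did not get a hard register and an insn
   uses it where a register operand is required.  The pseudo's rtx will
   have been replaced by a MEM for its stack slot, so reload copies that
   MEM into a reload register just before the insn, substitutes the reload
   register into the insn, and, for an output operand, stores the reload
   register back to the stack slot after the insn.  */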

#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif
\f
/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine.)  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This reg set indicates those registers that have been used as spill
   registers.  This information is used in reorg.c, to help figure out
   what registers are live at any point.  It is assumed that all spill_regs
   are dead at every CODE_LABEL.  */

HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */

static int last_spill_reg;

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Indexed by pseudo reg number N,
   says may not delete stores into the real (memory) home of pseudo N.
   This is set if we already substituted a memory equivalent in some uses,
   which happens when we have to eliminate the fp from it.  */
static char *cannot_omit_stores;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
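/* obstack.h uses the two macros above to pick the functions that allocate
   and free the chunks underlying reload_obstack.  */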

/* List of labels that must never be deleted.  */
extern rtx forced_labels;

/* Allocation number table from global register allocation.  */
extern int *reg_allocno;
\f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;                     /* Register number to be eliminated.  */
  int to;                       /* Register number used as replacement.  */
  int initial_offset;           /* Initial difference between values.  */
  int can_eliminate;            /* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;   /* Value of CAN_ELIMINATE in previous scan over
                                   insns made by reload.  */
  int offset;                   /* Current offset between the two regs.  */
  int max_offset;               /* Maximum offset between the two regs.  */
  int previous_offset;          /* Offset at end of previous insn.  */
  int ref_outside_mem;          /* "to" has been referenced outside a MEM.  */
  rtx from_rtx;                 /* REG rtx for the register to be eliminated.
                                   We cannot simply compare the number since
                                   we might then spuriously replace a hard
                                   register corresponding to a pseudo
                                   assigned to the reg to be eliminated.  */
  rtx to_rtx;                   /* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
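
/* For instance, a target that can replace both the argument pointer and the
   frame pointer with the stack pointer might define ELIMINABLE_REGS as
   {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM },
    { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }},
   in which case NUM_ELIMINABLE_REGS would be 2.  */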

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

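/* Pairs a hard register number with a count of its uses; order_regs_for_reload
   sorts an array of these (via hard_reg_use_compare) when ranking the hard
   registers as candidate reload registers.  */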
struct hard_reg_n_uses { int regno; int uses; };
\f
static int possible_group_p PROTO((int, int *));
static void count_possible_groups PROTO((int *, enum machine_mode *,
                                          int *, int));
static int modes_equiv_for_class_p PROTO((enum machine_mode,
                                           enum machine_mode,
                                           enum reg_class));
static void spill_failure PROTO((rtx));
static int new_spill_reg PROTO((int, int, int *, int *, int,
                                 FILE *));
static void delete_dead_insn PROTO((rtx));
static void alter_reg PROTO((int, int));
static void mark_scratch_live PROTO((rtx));
static void set_label_offsets PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn PROTO((rtx, int));
static void mark_not_eliminable PROTO((rtx, rtx));
static int spill_hard_reg PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs PROTO((rtx));
static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
                                        struct hard_reg_n_uses *));
static void order_regs_for_reload PROTO((int));
static int compare_spill_regs PROTO((short *, short *));
static void reload_as_needed PROTO((rtx, int));
static void forget_old_reloads_1 PROTO((rtx, rtx));
static int reload_reg_class_lower PROTO((short *, short *));
static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
                                           enum machine_mode));
static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
                                            enum machine_mode));
static int reload_reg_free_p PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
static int reloads_conflict PROTO((int, int));
static int allocate_reload_reg PROTO((int, rtx, int, int));
static void choose_reload_regs PROTO((rtx, rtx));
static void merge_assigned_reloads PROTO((rtx));
static void emit_reload_insns PROTO((rtx));
static void delete_output_reload PROTO((rtx, int, rtx));
static void inc_for_reload PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p PROTO((char *, rtx));
static int count_occurrences PROTO((rtx, rtx));
\f
/* Initialize the reload pass once per compilation.  */

void
init_reload ()
{
  register int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  register rtx tem
    = gen_rtx (MEM, Pmode,
               gen_rtx (PLUS, Pmode,
                        gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
                        GEN_INT (4)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx (MEM, Pmode, tem);
    }

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx (PLUS, Pmode,
                     gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
                     gen_rtx (REG, Pmode, i));
      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (tem, 4);

      if (memory_address_p (QImode, tem))
        {
          double_reg_address_ok = 1;
          break;
        }
    }

  /* Initialize obstack for our rtl allocation.  */
  gcc_obstack_init (&reload_obstack);
  reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   DUMPFILE is the global-reg debugging dump file stream, or 0.
   If it is nonzero, messages are written to it to describe
   which registers are seized as reload regs, which pseudo regs
   are spilled from them, and where the pseudo regs are reallocated to.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

int
reload (first, global, dumpfile)
     rtx first;
     int global;
     FILE *dumpfile;
{
  register int class;
  register int i, j, k;
  register rtx insn;
  register struct elim_table *ep;

  int something_changed;
  int something_needs_reloads;
  int something_needs_elimination;
  int new_basic_block_needs;
  enum reg_class caller_save_spill_class = NO_REGS;
  int caller_save_group_size = 1;

  /* Nonzero means we couldn't get enough spill regs.  */
  int failure = 0;

  /* The basic block number currently being processed for INSN.  */
  int this_block;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  for (i = 0; i < N_REG_CLASSES; i++)
    basic_block_needs[i] = 0;

#ifdef SECONDARY_MEMORY_NEEDED
  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();
#endif

  /* Remember which hard regs appear explicitly
     before we merge into `regs_ever_live' the ones in which
     pseudo regs have been allocated.  */
  bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);

  /* We don't have a stack slot for any spill reg yet.  */
  bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
  bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  for (i = 0; i < scratch_list_length; i++)
    if (scratch_list[i])
      mark_scratch_live (scratch_list[i]);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NULL_PTR, NOTE_INSN_DELETED);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
  reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
  reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
  reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
  reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
  reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
  bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
  cannot_omit_stores = (char *) alloca (max_regno);
  bzero (cannot_omit_stores, max_regno);

#ifdef SMALL_REGISTER_CLASSES
  CLEAR_HARD_REG_SET (forbidden_regs);
#endif

  /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
     Also find all paradoxical subregs and find largest such for each pseudo.
     On machines with small register classes, record hard registers that
     are used for user variables.  These can never be used for spills.
     Also look for a "constant" NOTE_INSN_SETJMP.  This means that all
     caller-saved registers must be marked live.  */

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx set = single_set (insn);

      if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
        for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
          if (! call_used_regs[i])
            regs_ever_live[i] = 1;

      if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
        {
          rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
          if (note
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
                  || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
#endif
              )
            {
              rtx x = XEXP (note, 0);
              i = REGNO (SET_DEST (set));
              if (i > LAST_VIRTUAL_REGISTER)
                {
                  if (GET_CODE (x) == MEM)
                    reg_equiv_memory_loc[i] = x;
                  else if (CONSTANT_P (x))
                    {
                      if (LEGITIMATE_CONSTANT_P (x))
                        reg_equiv_constant[i] = x;
                      else
                        reg_equiv_memory_loc[i]
                          = force_const_mem (GET_MODE (SET_DEST (set)), x);
                    }
                  else
                    continue;

                  /* If this register is being made equivalent to a MEM
                     and the MEM is not SET_SRC, the equivalencing insn
                     is one with the MEM as a SET_DEST and it occurs later.
                     So don't mark this insn now.  */
                  if (GET_CODE (x) != MEM
                      || rtx_equal_p (SET_SRC (set), x))
                    reg_equiv_init[i] = insn;
                }
            }
        }

      /* If this insn is setting a MEM from a register equivalent to it,
         this is the equivalencing insn.  */
      else if (set && GET_CODE (SET_DEST (set)) == MEM
               && GET_CODE (SET_SRC (set)) == REG
               && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
               && rtx_equal_p (SET_DEST (set),
                               reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
        reg_equiv_init[REGNO (SET_SRC (set))] = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        scan_paradoxical_subregs (PATTERN (insn));
    }

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
                          /* ?? If EXIT_IGNORE_STACK is set, we will not save
                             and restore sp for alloca.  So we can't eliminate
                             the frame pointer in that case.  At some point,
                             we should improve this by emitting the
                             sp-adjusting insns for this case.  */
                          || (current_function_calls_alloca
                              && EXIT_IGNORE_STACK)
#endif
                          || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

  /* Initialize the table of registers to eliminate.  The way we do this
     depends on how the eliminable registers were defined.  */
#ifdef ELIMINABLE_REGS
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
        = (CAN_ELIMINATE (ep->from, ep->to)
           && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
      ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
    }

  num_labels = max_label_num () - get_first_label_num ();

  /* Allocate the tables used to store offset information at labels.  */
  offsets_known_at = (char *) alloca (num_labels);
  offsets_at
    = (int (*)[NUM_ELIMINABLE_REGS])
      alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));

  offsets_known_at -= get_first_label_num ();
  offsets_at -= get_first_label_num ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.
     Assign stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    alter_reg (i, -1);

  /* Round size of stack frame to BIGGEST_ALIGNMENT.  This must be done here
     because the stack size may be a part of the offset computation for
     register elimination.  */
  assign_stack_local (BLKmode, 0, 0);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      note_stores (PATTERN (insn), mark_not_eliminable);

#ifndef REGISTER_CONSTRAINTS
  /* If all the pseudo regs have hard regs,
     except for those that are never referenced,
     we know that no reloads are needed.  */
  /* But that is not true if there are register constraints, since
     in that case some pseudos might be in the wrong kind of hard reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
      break;

  if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
    return 0;
#endif

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload (global);

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* On most machines, we can't use any register explicitly used in the
     rtl as a spill register.  But on some, we have to.  Those will have
     taken care to keep the life of hard regs as short as possible.  */

#ifndef SMALL_REGISTER_CLASSES
  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
#endif

  /* Spill any hard regs that we know we can't eliminate.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if (! ep->can_eliminate)
      spill_hard_reg (ep->from, global, dumpfile, 1);

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  if (frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
#endif

  if (global)
    for (i = 0; i < N_REG_CLASSES; i++)
      {
        basic_block_needs[i] = (char *) alloca (n_basic_blocks);
        bzero (basic_block_needs[i], n_basic_blocks);
      }

  /* From now on, we need to emit any moves without making new pseudos.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */

  /* This flag is set when a pseudo reg is spilled,
     to require another pass.  Note that getting an additional reload
     reg does not necessarily imply any pseudo reg was spilled;
     sometimes we find a reload reg that no pseudo reg was allocated in.  */
  something_changed = 1;
  /* This flag is set if there are any insns that require reloading.  */
  something_needs_reloads = 0;
  /* This flag is set if there are any insns that require register
     eliminations.  */
  something_needs_elimination = 0;
  while (something_changed)
    {
      rtx after_call = 0;

      /* For each class, number of reload regs needed in that class.
         This is the maximum over all insns of the needs in that class
         of the individual insn.  */
      int max_needs[N_REG_CLASSES];
      /* For each class, size of group of consecutive regs
         that is needed for the reloads of this class.  */
      int group_size[N_REG_CLASSES];
      /* For each class, max number of consecutive groups needed.
         (Each group contains group_size[CLASS] consecutive registers.)  */
      int max_groups[N_REG_CLASSES];
      /* For each class, max number needed of regs that don't belong
         to any of the groups.  */
      int max_nongroups[N_REG_CLASSES];
      /* For each class, the machine mode which requires consecutive
         groups of regs of that class.
         If two different modes ever require groups of one class,
         they must be the same size and equally restrictive for that class,
         otherwise we can't handle the complexity.  */
      enum machine_mode group_mode[N_REG_CLASSES];
      /* Record the insn where each maximum need is first found.  */
      rtx max_needs_insn[N_REG_CLASSES];
      rtx max_groups_insn[N_REG_CLASSES];
      rtx max_nongroups_insn[N_REG_CLASSES];
      rtx x;
      int starting_frame_size = get_frame_size ();
      int previous_frame_pointer_needed = frame_pointer_needed;
      static char *reg_class_names[] = REG_CLASS_NAMES;

      something_changed = 0;
      bzero ((char *) max_needs, sizeof max_needs);
      bzero ((char *) max_groups, sizeof max_groups);
      bzero ((char *) max_nongroups, sizeof max_nongroups);
      bzero ((char *) max_needs_insn, sizeof max_needs_insn);
      bzero ((char *) max_groups_insn, sizeof max_groups_insn);
      bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
      bzero ((char *) group_size, sizeof group_size);
      for (i = 0; i < N_REG_CLASSES; i++)
        group_mode[i] = VOIDmode;

      /* Keep track of which basic blocks are needing the reloads.  */
      this_block = 0;

      /* Remember whether any element of basic_block_needs
         changes from 0 to 1 in this pass.  */
      new_basic_block_needs = 0;

      /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
        {
          INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
          ep->previous_offset = ep->offset
            = ep->max_offset = ep->initial_offset;
        }
#else
#ifdef INITIAL_FRAME_POINTER_OFFSET
      INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
#else
      if (!FRAME_POINTER_REQUIRED)
        abort ();
      reg_eliminate[0].initial_offset = 0;
#endif
      reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
        = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

      num_not_at_initial_offset = 0;

      bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);

      /* Set a known offset for each forced label to be at the initial offset
         of each elimination.  We do this because we assume that all
         computed jumps occur from a location where each elimination is
         at its initial offset.  */

      for (x = forced_labels; x; x = XEXP (x, 1))
        if (XEXP (x, 0))
          set_label_offsets (XEXP (x, 0), NULL_RTX, 1);

      /* For each pseudo register that has an equivalent location defined,
         try to eliminate any eliminable registers (such as the frame pointer)
         assuming initial offsets for the replacement register, which
         is the normal case.

         If the resulting location is directly addressable, substitute
         the MEM we just got directly for the old REG.

         If it is not addressable but is a constant or the sum of a hard reg
         and constant, it is probably not addressable because the constant is
         out of range, in that case record the address; we will generate
         hairy code to compute the address in a register each time it is
         needed.  Similarly if it is a hard register, but one that is not
         valid as an address register.

         If the location is not addressable, but does not have one of the
         above forms, assign a stack slot.  We have to do this to avoid the
         potential of producing lots of reloads if, e.g., a location involves
         a pseudo that didn't get a hard register and has an equivalent memory
         location that also involves a pseudo that didn't get a hard register.

         Perhaps at some point we will improve reload_when_needed handling
         so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
        if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
          {
            rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);

            if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
                                         XEXP (x, 0)))
              reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
            else if (CONSTANT_P (XEXP (x, 0))
                     || (GET_CODE (XEXP (x, 0)) == REG
                         && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
                     || (GET_CODE (XEXP (x, 0)) == PLUS
                         && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
                         && (REGNO (XEXP (XEXP (x, 0), 0))
                             < FIRST_PSEUDO_REGISTER)
                         && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
              reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
            else
              {
                /* Make a new stack slot.  Then indicate that something
                   changed so we go back and recompute offsets for
                   eliminable registers because the allocation of memory
                   below might change some offset.  reg_equiv_{mem,address}
                   will be set up for this pseudo on the next pass around
                   the loop.  */
                reg_equiv_memory_loc[i] = 0;
                reg_equiv_init[i] = 0;
                alter_reg (i, -1);
                something_changed = 1;
              }
          }

      /* If we allocated another pseudo to the stack, redo elimination
         bookkeeping.  */
      if (something_changed)
        continue;

      /* If caller-saves needs a group, initialize the group to include
         the size and mode required for caller-saves.  */

      if (caller_save_group_size > 1)
        {
          group_mode[(int) caller_save_spill_class] = Pmode;
          group_size[(int) caller_save_spill_class] = caller_save_group_size;
        }

      /* Compute the most additional registers needed by any instruction.
         Collect information separately for each class of regs.  */

      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (global && this_block + 1 < n_basic_blocks
              && insn == basic_block_head[this_block+1])
            ++this_block;

          /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
             might include REG_LABEL), we need to see what effects this
             has on the known offsets at labels.  */

          if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
              || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && REG_NOTES (insn) != 0))
            set_label_offsets (insn, insn, 0);

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              /* Nonzero means don't use a reload reg that overlaps
                 the place where a function value can be returned.  */
              rtx avoid_return_reg = 0;

              rtx old_body = PATTERN (insn);
              int old_code = INSN_CODE (insn);
              rtx old_notes = REG_NOTES (insn);
              int did_elimination = 0;

              /* To compute the number of reload registers of each class
                 needed for an insn, we must simulate what choose_reload_regs
                 can do.  We do this by splitting an insn into an "input" and
                 an "output" part.  RELOAD_OTHER reloads are used in both.
                 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
                 which must be live over the entire input section of reloads,
                 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
                 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
                 inputs.

                 The registers needed for output are RELOAD_OTHER and
                 RELOAD_FOR_OUTPUT, which are live for the entire output
                 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
                 reloads for each operand.

                 The total number of registers needed is the maximum of the
                 inputs and outputs.  */

              struct needs
                {
                  /* [0] is normal, [1] is nongroup.  */
                  int regs[2][N_REG_CLASSES];
                  int groups[N_REG_CLASSES];
                };

              /* Each `struct needs' corresponds to one RELOAD_... type.  */
              struct {
                struct needs other;
                struct needs input;
                struct needs output;
                struct needs insn;
                struct needs other_addr;
                struct needs op_addr;
                struct needs op_addr_reload;
                struct needs in_addr[MAX_RECOG_OPERANDS];
                struct needs out_addr[MAX_RECOG_OPERANDS];
              } insn_needs;

              /* If needed, eliminate any eliminable registers.  */
              if (num_eliminable)
                did_elimination = eliminate_regs_in_insn (insn, 0);

#ifdef SMALL_REGISTER_CLASSES
              /* Set avoid_return_reg if this is an insn
                 that might use the value of a function call.  */
              if (GET_CODE (insn) == CALL_INSN)
                {
                  if (GET_CODE (PATTERN (insn)) == SET)
                    after_call = SET_DEST (PATTERN (insn));
                  else if (GET_CODE (PATTERN (insn)) == PARALLEL
                           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                    after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                  else
                    after_call = 0;
                }
              else if (after_call != 0
                       && !(GET_CODE (PATTERN (insn)) == SET
                            && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
                {
                  if (reg_referenced_p (after_call, PATTERN (insn)))
                    avoid_return_reg = after_call;
                  after_call = 0;
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* Analyze the instruction.  */
              find_reloads (insn, 0, spill_indirect_levels, global,
                            spill_reg_order);

              /* Remember for later shortcuts which insns had any reloads or
                 register eliminations.

                 One might think that it would be worthwhile to mark insns
                 that need register replacements but not reloads, but this is
                 not safe because find_reloads may do some manipulation of
                 the insn (such as swapping commutative operands), which would
                 be lost when we restore the old pattern after register
                 replacement.  So the actions of find_reloads must be redone in
                 subsequent passes or in reload_as_needed.

                 However, it is safe to mark insns that need reloads
                 but not register replacement.  */

              PUT_MODE (insn, (did_elimination ? QImode
                               : n_reloads ? HImode
                               : GET_MODE (insn) == DImode ? DImode
                               : VOIDmode));

              /* Discard any register replacements done.  */
              if (did_elimination)
                {
                  obstack_free (&reload_obstack, reload_firstobj);
                  PATTERN (insn) = old_body;
                  INSN_CODE (insn) = old_code;
                  REG_NOTES (insn) = old_notes;
                  something_needs_elimination = 1;
                }

              /* If this insn has no reloads, we need not do anything except
                 in the case of a CALL_INSN when we have caller-saves and
                 caller-save needs reloads.  */

              if (n_reloads == 0
                  && ! (GET_CODE (insn) == CALL_INSN
                        && caller_save_spill_class != NO_REGS))
                continue;

              something_needs_reloads = 1;
              bzero ((char *) &insn_needs, sizeof insn_needs);

              /* Count each reload once in every class
                 containing the reload's own class.  */

              for (i = 0; i < n_reloads; i++)
                {
                  register enum reg_class *p;
                  enum reg_class class = reload_reg_class[i];
                  int size;
                  enum machine_mode mode;
                  int nongroup_need;
                  struct needs *this_needs;

                  /* Don't count the dummy reloads, for which one of the
                     regs mentioned in the insn can be used for reloading.
                     Don't count optional reloads.
                     Don't count reloads that got combined with others.  */
                  if (reload_reg_rtx[i] != 0
                      || reload_optional[i] != 0
                      || (reload_out[i] == 0 && reload_in[i] == 0
                          && ! reload_secondary_p[i]))
                    continue;

                  /* Show that a reload register of this class is needed
                     in this basic block.  We do not use insn_needs and
                     insn_groups because they are overly conservative for
                     this purpose.  */
                  if (global && ! basic_block_needs[(int) class][this_block])
                    {
                      basic_block_needs[(int) class][this_block] = 1;
                      new_basic_block_needs = 1;
                    }

                  mode = reload_inmode[i];
                  if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
                    mode = reload_outmode[i];
                  size = CLASS_MAX_NREGS (class, mode);

                  /* If this class doesn't want a group, determine if we have
                     a nongroup need or a regular need.  We have a nongroup
                     need if this reload conflicts with a group reload whose
                     class intersects with this reload's class.  */

                  nongroup_need = 0;
                  if (size == 1)
                    for (j = 0; j < n_reloads; j++)
                      if ((CLASS_MAX_NREGS (reload_reg_class[j],
                                            (GET_MODE_SIZE (reload_outmode[j])
                                             > GET_MODE_SIZE (reload_inmode[j]))
                                            ? reload_outmode[j]
                                            : reload_inmode[j])
                           > 1)
                          && (!reload_optional[j])
                          && (reload_in[j] != 0 || reload_out[j] != 0
                              || reload_secondary_p[j])
                          && reloads_conflict (i, j)
                          && reg_classes_intersect_p (class,
                                                      reload_reg_class[j]))
                        {
                          nongroup_need = 1;
                          break;
                        }

                  /* Decide which time-of-use to count this reload for.  */
                  switch (reload_when_needed[i])
                    {
                    case RELOAD_OTHER:
                      this_needs = &insn_needs.other;
                      break;
                    case RELOAD_FOR_INPUT:
                      this_needs = &insn_needs.input;
                      break;
                    case RELOAD_FOR_OUTPUT:
                      this_needs = &insn_needs.output;
                      break;
                    case RELOAD_FOR_INSN:
                      this_needs = &insn_needs.insn;
                      break;
                    case RELOAD_FOR_OTHER_ADDRESS:
                      this_needs = &insn_needs.other_addr;
                      break;
                    case RELOAD_FOR_INPUT_ADDRESS:
                      this_needs = &insn_needs.in_addr[reload_opnum[i]];
                      break;
                    case RELOAD_FOR_OUTPUT_ADDRESS:
                      this_needs = &insn_needs.out_addr[reload_opnum[i]];
                      break;
                    case RELOAD_FOR_OPERAND_ADDRESS:
                      this_needs = &insn_needs.op_addr;
                      break;
                    case RELOAD_FOR_OPADDR_ADDR:
                      this_needs = &insn_needs.op_addr_reload;
                      break;
                    }

                  if (size > 1)
                    {
                      enum machine_mode other_mode, allocate_mode;

                      /* Count number of groups needed separately from
                         number of individual regs needed.  */
                      this_needs->groups[(int) class]++;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_needs->groups[(int) *p++]++;

                      /* Record size and mode of a group of this class.  */
                      /* If more than one size group is needed,
                         make all groups the largest needed size.  */
                      if (group_size[(int) class] < size)
                        {
                          other_mode = group_mode[(int) class];
                          allocate_mode = mode;

                          group_size[(int) class] = size;
                          group_mode[(int) class] = mode;
                        }
                      else
                        {
                          other_mode = mode;
                          allocate_mode = group_mode[(int) class];
                        }

                      /* Crash if two dissimilar machine modes both need
                         groups of consecutive regs of the same class.  */

                      if (other_mode != VOIDmode && other_mode != allocate_mode
                          && ! modes_equiv_for_class_p (allocate_mode,
                                                        other_mode, class))
                        fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
                                    insn);
                    }
                  else if (size == 1)
                    {
                      this_needs->regs[nongroup_need][(int) class] += 1;
                      p = reg_class_superclasses[(int) class];
                      while (*p != LIM_REG_CLASSES)
                        this_needs->regs[nongroup_need][(int) *p++] += 1;
                    }
                  else
                    abort ();
                }

              /* All reloads have been counted for this insn;
                 now merge the various times of use.
                 This sets insn_needs, etc., to the maximum total number
                 of registers needed at any point in this insn.  */
1201 of registers needed at any point in this insn. */
1202
1203 for (i = 0; i < N_REG_CLASSES; i++)
1204 {
546b63fb
RK
1205 int in_max, out_max;
1206
8b3e912b
RK
1207 /* Compute normal and nongroup needs. */
1208 for (j = 0; j <= 1; j++)
546b63fb 1209 {
8b3e912b
RK
1210 for (in_max = 0, out_max = 0, k = 0;
1211 k < reload_n_operands; k++)
1212 {
1213 in_max
1214 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1215 out_max
1216 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1217 }
546b63fb 1218
8b3e912b
RK
1219 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1220 and operand addresses but not things used to reload
1221 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1222 don't conflict with things needed to reload inputs or
0f41302f 1223 outputs. */
546b63fb 1224
893bc853
RK
1225 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1226 insn_needs.op_addr_reload.regs[j][i]),
1227 in_max);
1228
8b3e912b 1229 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
546b63fb 1230
8b3e912b
RK
1231 insn_needs.input.regs[j][i]
1232 = MAX (insn_needs.input.regs[j][i]
1233 + insn_needs.op_addr.regs[j][i]
1234 + insn_needs.insn.regs[j][i],
1235 in_max + insn_needs.input.regs[j][i]);
546b63fb 1236
8b3e912b
RK
1237 insn_needs.output.regs[j][i] += out_max;
1238 insn_needs.other.regs[j][i]
1239 += MAX (MAX (insn_needs.input.regs[j][i],
1240 insn_needs.output.regs[j][i]),
1241 insn_needs.other_addr.regs[j][i]);
546b63fb 1242
ce0e109b
RK
1243 }
1244
8b3e912b 1245 /* Now compute group needs. */
546b63fb
RK
1246 for (in_max = 0, out_max = 0, j = 0;
1247 j < reload_n_operands; j++)
1248 {
8b3e912b
RK
1249 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1250 out_max
1251 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
546b63fb
RK
1252 }
1253
893bc853
RK
1254 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1255 insn_needs.op_addr_reload.groups[i]),
1256 in_max);
8b3e912b 1257 out_max = MAX (out_max, insn_needs.insn.groups[i]);
546b63fb 1258
8b3e912b
RK
1259 insn_needs.input.groups[i]
1260 = MAX (insn_needs.input.groups[i]
1261 + insn_needs.op_addr.groups[i]
1262 + insn_needs.insn.groups[i],
1263 in_max + insn_needs.input.groups[i]);
546b63fb 1264
8b3e912b
RK
1265 insn_needs.output.groups[i] += out_max;
1266 insn_needs.other.groups[i]
1267 += MAX (MAX (insn_needs.input.groups[i],
1268 insn_needs.output.groups[i]),
1269 insn_needs.other_addr.groups[i]);
546b63fb
RK
1270 }

              /* If this is a CALL_INSN and caller-saves will need
                 a spill register, act as if the spill register is
                 needed for this insn.  However, the spill register
                 can be used by any reload of this insn, so we only
                 need do something if no need for that class has
                 been recorded.

                 The assumption that every CALL_INSN will trigger a
                 caller-save is highly conservative; however, the number
                 of cases where caller-saves will need a spill register but
                 a block containing a CALL_INSN won't need a spill register
                 of that class should be quite rare.

                 If a group is needed, the size and mode of the group will
                 have been set up at the beginning of this loop.  */

              if (GET_CODE (insn) == CALL_INSN
                  && caller_save_spill_class != NO_REGS)
                {
                  /* See if this register would conflict with any reload
                     that needs a group.  */
                  int nongroup_need = 0;
                  int *caller_save_needs;

                  for (j = 0; j < n_reloads; j++)
                    if ((CLASS_MAX_NREGS (reload_reg_class[j],
                                          (GET_MODE_SIZE (reload_outmode[j])
                                           > GET_MODE_SIZE (reload_inmode[j]))
                                          ? reload_outmode[j]
                                          : reload_inmode[j])
                         > 1)
                        && reg_classes_intersect_p (caller_save_spill_class,
                                                    reload_reg_class[j]))
                      {
                        nongroup_need = 1;
                        break;
                      }

                  caller_save_needs
                    = (caller_save_group_size > 1
                       ? insn_needs.other.groups
                       : insn_needs.other.regs[nongroup_need]);

                  if (caller_save_needs[(int) caller_save_spill_class] == 0)
                    {
                      register enum reg_class *p
                        = reg_class_superclasses[(int) caller_save_spill_class];

                      caller_save_needs[(int) caller_save_spill_class]++;

                      while (*p != LIM_REG_CLASSES)
                        caller_save_needs[(int) *p++] += 1;
                    }

                  /* Show that this basic block will need a register of
                     this class.  */

                  if (global
                      && ! (basic_block_needs[(int) caller_save_spill_class]
                            [this_block]))
                    {
                      basic_block_needs[(int) caller_save_spill_class]
                        [this_block] = 1;
                      new_basic_block_needs = 1;
                    }
                }

#ifdef SMALL_REGISTER_CLASSES
              /* If this insn stores the value of a function call,
                 and that value is in a register that has been spilled,
                 and if the insn needs a reload in a class
                 that might use that register as the reload register,
                 then add an extra need in that class.
                 This makes sure we have a register available that does
                 not overlap the return value.  */

              if (avoid_return_reg)
                {
                  int regno = REGNO (avoid_return_reg);
                  int nregs
                    = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
                  int r;
                  int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];

                  /* First compute the "basic needs", which counts a
                     need only in the smallest class in which it
                     is required.  */

                  bcopy ((char *) insn_needs.other.regs[0],
                         (char *) basic_needs, sizeof basic_needs);
                  bcopy ((char *) insn_needs.other.groups,
                         (char *) basic_groups, sizeof basic_groups);

                  for (i = 0; i < N_REG_CLASSES; i++)
                    {
                      enum reg_class *p;

                      if (basic_needs[i] >= 0)
                        for (p = reg_class_superclasses[i];
                             *p != LIM_REG_CLASSES; p++)
                          basic_needs[(int) *p] -= basic_needs[i];

                      if (basic_groups[i] >= 0)
                        for (p = reg_class_superclasses[i];
                             *p != LIM_REG_CLASSES; p++)
                          basic_groups[(int) *p] -= basic_groups[i];
                    }

                  /* Now count extra regs if there might be a conflict with
                     the return value register.  */

                  for (r = regno; r < regno + nregs; r++)
                    if (spill_reg_order[r] >= 0)
                      for (i = 0; i < N_REG_CLASSES; i++)
                        if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
                          {
                            if (basic_needs[i] > 0)
                              {
                                enum reg_class *p;

                                insn_needs.other.regs[0][i]++;
                                p = reg_class_superclasses[i];
                                while (*p != LIM_REG_CLASSES)
                                  insn_needs.other.regs[0][(int) *p++]++;
                              }
                            if (basic_groups[i] > 0)
                              {
                                enum reg_class *p;

                                insn_needs.other.groups[i]++;
                                p = reg_class_superclasses[i];
                                while (*p != LIM_REG_CLASSES)
                                  insn_needs.other.groups[(int) *p++]++;
                              }
                          }
                }
#endif /* SMALL_REGISTER_CLASSES */

              /* For each class, collect maximum need of any insn.  */

              for (i = 0; i < N_REG_CLASSES; i++)
                {
                  if (max_needs[i] < insn_needs.other.regs[0][i])
                    {
                      max_needs[i] = insn_needs.other.regs[0][i];
                      max_needs_insn[i] = insn;
                    }
                  if (max_groups[i] < insn_needs.other.groups[i])
                    {
                      max_groups[i] = insn_needs.other.groups[i];
                      max_groups_insn[i] = insn;
                    }
                  if (max_nongroups[i] < insn_needs.other.regs[1][i])
                    {
                      max_nongroups[i] = insn_needs.other.regs[1][i];
                      max_nongroups_insn[i] = insn;
                    }
                }
            }
          /* Note that there is a continue statement above.  */
        }

      /* If we allocated any new memory locations, make another pass
         since it might have changed elimination offsets.  */
      if (starting_frame_size != get_frame_size ())
        something_changed = 1;

      if (dumpfile)
        for (i = 0; i < N_REG_CLASSES; i++)
          {
            if (max_needs[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d reg%s of class %s (for insn %d).\n",
                       max_needs[i], max_needs[i] == 1 ? "" : "s",
                       reg_class_names[i], INSN_UID (max_needs_insn[i]));
            if (max_nongroups[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
                       max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
                       reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
            if (max_groups[i] > 0)
              fprintf (dumpfile,
                       ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
                       max_groups[i], max_groups[i] == 1 ? "" : "s",
                       mode_name[(int) group_mode[i]],
                       reg_class_names[i], INSN_UID (max_groups_insn[i]));
          }
1459
d445b551 1460 /* If we have caller-saves, set up the save areas and see if caller-save
a8efe40d 1461 will need a spill register. */
32131a9c 1462
d445b551 1463 if (caller_save_needed
a8efe40d
RK
1464 && ! setup_save_areas (&something_changed)
1465 && caller_save_spill_class == NO_REGS)
32131a9c 1466 {
a8efe40d
RK
1467 /* The class we will need depends on whether the machine
1468 supports the sum of two registers for an address; see
1469 find_address_reloads for details. */
1470
a8fdc208 1471 caller_save_spill_class
a8efe40d
RK
1472 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1473 caller_save_group_size
1474 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1475 something_changed = 1;
32131a9c
RK
1476 }
1477
5c23c401
RK
1478 /* See if anything that happened changes which eliminations are valid.
1479 For example, on the Sparc, whether or not the frame pointer can
1480 be eliminated can depend on what registers have been used. We need
1481 not check some conditions again (such as flag_omit_frame_pointer)
1482 since they can't have changed. */
1483
1484 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3ec2ea3e 1485 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
5c23c401
RK
1486#ifdef ELIMINABLE_REGS
1487 || ! CAN_ELIMINATE (ep->from, ep->to)
1488#endif
1489 )
1490 ep->can_eliminate = 0;
1491
32131a9c
RK
1492 /* Look for the case where we have discovered that we can't replace
1493 register A with register B and that means that we will now be
1494 trying to replace register A with register C. This means we can
1495 no longer replace register C with register B and we need to disable
1496 such an elimination, if it exists. This occurs often with A == ap,
1497 B == sp, and C == fp. */
a8fdc208 1498
32131a9c
RK
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 {
1501 struct elim_table *op;
1502 register int new_to = -1;
1503
1504 if (! ep->can_eliminate && ep->can_eliminate_previous)
1505 {
1506 /* Find the current elimination for ep->from, if there is a
1507 new one. */
1508 for (op = reg_eliminate;
1509 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1510 if (op->from == ep->from && op->can_eliminate)
1511 {
1512 new_to = op->to;
1513 break;
1514 }
1515
1516 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1517 disable it. */
1518 for (op = reg_eliminate;
1519 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1520 if (op->from == new_to && op->to == ep->to)
1521 op->can_eliminate = 0;
1522 }
1523 }
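/* (Editor's note: illustrative example only, not part of reload1.c.
   A concrete instance of the rule above, using the usual ap/fp/sp
   case.  Suppose the table holds:

     ap -> sp   can_eliminate 0 (just discovered to be impossible)
     ap -> fp   can_eliminate 1
     fp -> sp   can_eliminate 1

   The first inner loop finds that ap will now be replaced by fp, so
   NEW_TO is fp; the second inner loop then clears can_eliminate on
   the fp -> sp entry, because fp must now stay around to stand in
   for ap.  */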
1524
1525 /* See if any registers that we thought we could eliminate the previous
1526 time are no longer eliminable. If so, something has changed and we
1527 must spill the register. Also, recompute the number of eliminable
1528 registers and see if the frame pointer is needed; it is if there is
1529 no elimination of the frame pointer that we can perform. */
1530
1531 frame_pointer_needed = 1;
1532 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1533 {
3ec2ea3e
DE
1534 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1535 && ep->to != HARD_FRAME_POINTER_REGNUM)
32131a9c
RK
1536 frame_pointer_needed = 0;
1537
1538 if (! ep->can_eliminate && ep->can_eliminate_previous)
1539 {
1540 ep->can_eliminate_previous = 0;
1541 spill_hard_reg (ep->from, global, dumpfile, 1);
32131a9c
RK
1542 something_changed = 1;
1543 num_eliminable--;
1544 }
1545 }
1546
9ff3516a
RK
1547#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1548 /* If we didn't need a frame pointer last time, but we do now, spill
1549 the hard frame pointer. */
1550 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1551 {
1552 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1553 something_changed = 1;
1554 }
1555#endif
1556
32131a9c
RK
1557 /* If all needs are met, we win. */
1558
1559 for (i = 0; i < N_REG_CLASSES; i++)
1560 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1561 break;
1562 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1563 break;
1564
546b63fb
RK
1565 /* Not all needs are met; must spill some hard regs. */
1566
1567 /* Put all registers spilled so far back in potential_reload_regs, but
1568 put them at the front, since we've already spilled most of the
9faa82d8 1569 pseudos in them (we might have left some pseudos unspilled if they
546b63fb
RK
1570 were in a block that didn't need any spill registers of a conflicting
1571 class). We used to try to mark off the need for those registers,
1572 but doing so properly is very complex and reallocating them is the
1573 simpler approach. First, "pack" potential_reload_regs by pushing
1574 any nonnegative entries towards the end. That will leave room
1575 for the registers we already spilled.
1576
1577 Also, undo the marking of the spill registers from the last time
1578 around in FORBIDDEN_REGS since we will probably be allocating
1579 them again below.
1580
1581 ??? It is theoretically possible that we might end up not using one
1582 of our previously-spilled registers in this allocation, even though
1583 they are at the head of the list. It's not clear what to do about
1584 this, but it was no better before, when we marked off the needs met
1585 by the previously-spilled registers. With the current code, globals
1586 can be allocated into these registers, but locals cannot. */
1587
1588 if (n_spills)
1589 {
1590 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1591 if (potential_reload_regs[i] != -1)
1592 potential_reload_regs[j--] = potential_reload_regs[i];
32131a9c 1593
546b63fb
RK
1594 for (i = 0; i < n_spills; i++)
1595 {
1596 potential_reload_regs[i] = spill_regs[i];
1597 spill_reg_order[spill_regs[i]] = -1;
1598 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1599 }
32131a9c 1600
546b63fb
RK
1601 n_spills = 0;
1602 }
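/* (Editor's note: illustrative trace only, not part of reload1.c.
   With FIRST_PSEUDO_REGISTER shrunk to 6 and made-up contents:

     potential_reload_regs = { -1, 4, -1, 5, 1, -1 }
     spill_regs            = { 0, 2, 3 },  n_spills = 3

   the backward copy packs the surviving entries toward the tail,
   keeping their order:

     potential_reload_regs = { ?, ?, ?, 4, 5, 1 }

   and the second loop refills the front with the old spill regs:

     potential_reload_regs = { 0, 2, 3, 4, 5, 1 }

   so the registers spilled last time are considered first below.  */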
32131a9c
RK
1603
1604 /* Now find more reload regs to satisfy the remaining need.
1605 Do it by ascending class number, since otherwise a reg
1606 might be spilled for a big class and might fail to count
1607 for a smaller class even though it belongs to that class.
1608
1609 Count spilled regs in `spills', and add entries to
1610 `spill_regs' and `spill_reg_order'.
1611
1612 ??? Note there is a problem here.
1613 When there is a need for a group in a high-numbered class,
1614 and also need for non-group regs that come from a lower class,
1615 the non-group regs are chosen first. If there aren't many regs,
1616 they might leave no room for a group.
1617
1618 This was happening on the 386. To fix it, we added the code
1619 that calls possible_group_p, so that the lower class won't
1620 break up the last possible group.
1621
1622 Really fixing the problem would require changes above
1623 in counting the regs already spilled, and in choose_reload_regs.
1624 It might be hard to avoid introducing bugs there. */
1625
546b63fb
RK
1626 CLEAR_HARD_REG_SET (counted_for_groups);
1627 CLEAR_HARD_REG_SET (counted_for_nongroups);
1628
32131a9c
RK
1629 for (class = 0; class < N_REG_CLASSES; class++)
1630 {
1631 /* First get the groups of registers.
1632 If we got single registers first, we might fragment
1633 possible groups. */
1634 while (max_groups[class] > 0)
1635 {
1636 /* If any single spilled regs happen to form groups,
1637 count them now. Maybe we don't really need
1638 to spill another group. */
066aca28
RK
1639 count_possible_groups (group_size, group_mode, max_groups,
1640 class);
32131a9c 1641
93193ab5
RK
1642 if (max_groups[class] <= 0)
1643 break;
1644
32131a9c
RK
1645 /* Groups of size 2 (the only groups used on most machines)
1646 are treated specially. */
1647 if (group_size[class] == 2)
1648 {
1649 /* First, look for a register that will complete a group. */
1650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1651 {
32131a9c 1652 int other;
546b63fb
RK
1653
1654 j = potential_reload_regs[i];
32131a9c
RK
1655 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1656 &&
1657 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1658 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1659 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1660 && HARD_REGNO_MODE_OK (other, group_mode[class])
1661 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1662 other)
1663 /* We don't want one part of another group.
1664 We could get "two groups" that overlap! */
1665 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1666 ||
1667 (j < FIRST_PSEUDO_REGISTER - 1
1668 && (other = j + 1, spill_reg_order[other] >= 0)
1669 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1670 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1671 && HARD_REGNO_MODE_OK (j, group_mode[class])
1672 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1673 other)
1674 && ! TEST_HARD_REG_BIT (counted_for_groups,
1675 other))))
1676 {
1677 register enum reg_class *p;
1678
1679 /* We have found one that will complete a group,
1680 so count off one group as provided. */
1681 max_groups[class]--;
1682 p = reg_class_superclasses[class];
1683 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1684 {
1685 if (group_size [(int) *p] <= group_size [class])
1686 max_groups[(int) *p]--;
1687 p++;
1688 }
32131a9c
RK
1689
1690 /* Indicate both these regs are part of a group. */
1691 SET_HARD_REG_BIT (counted_for_groups, j);
1692 SET_HARD_REG_BIT (counted_for_groups, other);
1693 break;
1694 }
1695 }
1696 /* We can't complete a group, so start one. */
92b0556d
RS
1697#ifdef SMALL_REGISTER_CLASSES
1698 /* Look for a pair neither of which is explicitly used. */
1699 if (i == FIRST_PSEUDO_REGISTER)
1700 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1701 {
1702 int k;
1703 j = potential_reload_regs[i];
1704 /* Verify that J+1 is a potential reload reg. */
1705 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1706 if (potential_reload_regs[k] == j + 1)
1707 break;
1708 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1709 && k < FIRST_PSEUDO_REGISTER
1710 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1711 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1712 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1713 && HARD_REGNO_MODE_OK (j, group_mode[class])
1714 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1715 j + 1)
1716 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1717 /* Reject J at this stage
1718 if J+1 was explicitly used. */
1719 && ! regs_explicitly_used[j + 1])
1720 break;
1721 }
1722#endif
1723 /* Now try any group at all
1724 whose registers are not in bad_spill_regs. */
32131a9c
RK
1725 if (i == FIRST_PSEUDO_REGISTER)
1726 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1727 {
57697575 1728 int k;
546b63fb 1729 j = potential_reload_regs[i];
57697575
RS
1730 /* Verify that J+1 is a potential reload reg. */
1731 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1732 if (potential_reload_regs[k] == j + 1)
1733 break;
32131a9c 1734 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
57697575 1735 && k < FIRST_PSEUDO_REGISTER
32131a9c
RK
1736 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1737 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1738 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1739 && HARD_REGNO_MODE_OK (j, group_mode[class])
1740 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
ee9f63c6
RS
1741 j + 1)
1742 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
32131a9c
RK
1743 break;
1744 }
1745
1746 /* I should be the index in potential_reload_regs
1747 of the new reload reg we have found. */
1748
5352b11a
RS
1749 if (i >= FIRST_PSEUDO_REGISTER)
1750 {
1751 /* There are no groups left to spill. */
1752 spill_failure (max_groups_insn[class]);
1753 failure = 1;
1754 goto failed;
1755 }
1756 else
1757 something_changed
fb3821f7 1758 |= new_spill_reg (i, class, max_needs, NULL_PTR,
5352b11a 1759 global, dumpfile);
32131a9c
RK
1760 }
1761 else
1762 {
1763 /* For groups of more than 2 registers,
1764 look for a sufficient sequence of unspilled registers,
1765 and spill them all at once. */
1766 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1767 {
32131a9c 1768 int k;
546b63fb
RK
1769
1770 j = potential_reload_regs[i];
9d1a4667
RS
1771 if (j >= 0
1772 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
32131a9c
RK
1773 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1774 {
1775 /* Check each reg in the sequence. */
1776 for (k = 0; k < group_size[class]; k++)
1777 if (! (spill_reg_order[j + k] < 0
1778 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1779 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1780 break;
1781 /* We got a full sequence, so spill them all. */
1782 if (k == group_size[class])
1783 {
1784 register enum reg_class *p;
1785 for (k = 0; k < group_size[class]; k++)
1786 {
1787 int idx;
1788 SET_HARD_REG_BIT (counted_for_groups, j + k);
1789 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1790 if (potential_reload_regs[idx] == j + k)
1791 break;
9d1a4667
RS
1792 something_changed
1793 |= new_spill_reg (idx, class,
1794 max_needs, NULL_PTR,
1795 global, dumpfile);
32131a9c
RK
1796 }
1797
1798 /* We have found one that will complete a group,
1799 so count off one group as provided. */
1800 max_groups[class]--;
1801 p = reg_class_superclasses[class];
1802 while (*p != LIM_REG_CLASSES)
d601d5da
JW
1803 {
1804 if (group_size [(int) *p]
1805 <= group_size [class])
1806 max_groups[(int) *p]--;
1807 p++;
1808 }
32131a9c
RK
1809 break;
1810 }
1811 }
1812 }
fa52261e 1813 /* We couldn't find any registers for this reload.
9d1a4667
RS
1814 Avoid going into an infinite loop. */
1815 if (i >= FIRST_PSEUDO_REGISTER)
1816 {
1817 /* There are no groups left. */
1818 spill_failure (max_groups_insn[class]);
1819 failure = 1;
1820 goto failed;
1821 }
32131a9c
RK
1822 }
1823 }
1824
1825 /* Now similarly satisfy all need for single registers. */
1826
1827 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1828 {
9a6cde3a
RS
1829 /* If we spilled enough regs, but they weren't counted
1830 against the non-group need, see if we can count them now.
1831 If so, we can avoid some actual spilling. */
1832 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1833 for (i = 0; i < n_spills; i++)
1834 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1835 spill_regs[i])
1836 && !TEST_HARD_REG_BIT (counted_for_groups,
1837 spill_regs[i])
1838 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1839 spill_regs[i])
1840 && max_nongroups[class] > 0)
1841 {
1842 register enum reg_class *p;
1843
1844 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1845 max_nongroups[class]--;
1846 p = reg_class_superclasses[class];
1847 while (*p != LIM_REG_CLASSES)
1848 max_nongroups[(int) *p++]--;
1849 }
1850 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1851 break;
9a6cde3a 1852
32131a9c
RK
1853 /* Consider the potential reload regs that aren't
1854 yet in use as reload regs, in order of preference.
1855 Find the most preferred one that's in this class. */
1856
1857 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1858 if (potential_reload_regs[i] >= 0
1859 && TEST_HARD_REG_BIT (reg_class_contents[class],
1860 potential_reload_regs[i])
1861 /* If this reg will not be available for groups,
1862 pick one that does not foreclose possible groups.
1863 This is a kludge, and not very general,
1864 but it should be sufficient to make the 386 work,
1865 and the problem should not occur on machines with
1866 more registers. */
1867 && (max_nongroups[class] == 0
1868 || possible_group_p (potential_reload_regs[i], max_groups)))
1869 break;
1870
e404a39a
RK
1871 /* If we couldn't get a register, try to get one even if we
1872 might foreclose possible groups. This may cause problems
1873 later, but that's better than aborting now, since it is
1874 possible that we will, in fact, be able to form the needed
1875 group even with this allocation. */
1876
1877 if (i >= FIRST_PSEUDO_REGISTER
1878 && (asm_noperands (max_needs[class] > 0
1879 ? max_needs_insn[class]
1880 : max_nongroups_insn[class])
1881 < 0))
1882 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1883 if (potential_reload_regs[i] >= 0
1884 && TEST_HARD_REG_BIT (reg_class_contents[class],
1885 potential_reload_regs[i]))
1886 break;
1887
32131a9c
RK
1888 /* I should be the index in potential_reload_regs
1889 of the new reload reg we have found. */
1890
5352b11a
RS
1891 if (i >= FIRST_PSEUDO_REGISTER)
1892 {
1893 /* There are no possible registers left to spill. */
1894 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1895 : max_nongroups_insn[class]);
1896 failure = 1;
1897 goto failed;
1898 }
1899 else
1900 something_changed
1901 |= new_spill_reg (i, class, max_needs, max_nongroups,
1902 global, dumpfile);
32131a9c
RK
1903 }
1904 }
1905 }
1906
1907 /* If global-alloc was run, notify it of any register eliminations we have
1908 done. */
1909 if (global)
1910 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1911 if (ep->can_eliminate)
1912 mark_elimination (ep->from, ep->to);
1913
32131a9c 1914 /* Insert code to save and restore call-clobbered hard regs
a8efe40d
RK
1915 around calls. Tell what mode to use so that we will process
1916 those insns in reload_as_needed if we have to. */
32131a9c
RK
1917
1918 if (caller_save_needed)
a8efe40d
RK
1919 save_call_clobbered_regs (num_eliminable ? QImode
1920 : caller_save_spill_class != NO_REGS ? HImode
1921 : VOIDmode);
32131a9c
RK
1922
1923 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1924 If that insn didn't set the register (i.e., it copied the register to
1925 memory), just delete that insn instead of the equivalencing insn plus
1926 anything now dead. If we call delete_dead_insn on that insn, we may
1927 delete the insn that actually sets the register if the register dies
1928 there and that is incorrect. */
1929
1930 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1931 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1932 && GET_CODE (reg_equiv_init[i]) != NOTE)
1933 {
1934 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1935 delete_dead_insn (reg_equiv_init[i]);
1936 else
1937 {
1938 PUT_CODE (reg_equiv_init[i], NOTE);
1939 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1940 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1941 }
1942 }
1943
1944 /* Use the reload registers where necessary
1945 by generating move instructions to move the must-be-register
1946 values into or out of the reload registers. */
1947
a8efe40d
RK
1948 if (something_needs_reloads || something_needs_elimination
1949 || (caller_save_needed && num_eliminable)
1950 || caller_save_spill_class != NO_REGS)
32131a9c
RK
1951 reload_as_needed (first, global);
1952
2a1f8b6b 1953 /* If we were able to eliminate the frame pointer, show that it is no
546b63fb 1954 longer live at the start of any basic block. If it is live by
2a1f8b6b
RK
1955 virtue of being in a pseudo, that pseudo will be marked live
1956 and hence the frame pointer will be known to be live via that
1957 pseudo. */
1958
1959 if (! frame_pointer_needed)
1960 for (i = 0; i < n_basic_blocks; i++)
3ec2ea3e
DE
1961 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1962 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1963 % REGSET_ELT_BITS));
2a1f8b6b 1964
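/* (Editor's note: illustrative example only, not part of reload1.c.
   The statement above is ordinary bitset arithmetic.  Assuming, for
   the example only, REGSET_ELT_BITS == 32 and
   HARD_FRAME_POINTER_REGNUM == 6, it clears bit 6 of element 0 of
   each block's live-at-start set:

     live[6 / 32] &= ~((REGSET_ELT_TYPE) 1 << (6 % 32));

   that is, live[0] &= ~0x40.  */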
5352b11a
RS
1965 /* Come here (with failure set nonzero) if we can't get enough spill regs
1966 and we decide not to abort about it. */
1967 failed:
1968
a3ec87a8
RS
1969 reload_in_progress = 0;
1970
32131a9c
RK
1971 /* Now eliminate all pseudo regs by modifying them into
1972 their equivalent memory references.
1973 The REG-rtx's for the pseudos are modified in place,
1974 so all insns that used to refer to them now refer to memory.
1975
1976 For a reg that has a reg_equiv_address, all those insns
1977 were changed by reloading so that no insns refer to it any longer;
1978 but the DECL_RTL of a variable decl may refer to it,
1979 and if so this causes the debugging info to mention the variable. */
1980
1981 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1982 {
1983 rtx addr = 0;
ab1fd483 1984 int in_struct = 0;
32131a9c 1985 if (reg_equiv_mem[i])
ab1fd483
RS
1986 {
1987 addr = XEXP (reg_equiv_mem[i], 0);
1988 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1989 }
32131a9c
RK
1990 if (reg_equiv_address[i])
1991 addr = reg_equiv_address[i];
1992 if (addr)
1993 {
1994 if (reg_renumber[i] < 0)
1995 {
1996 rtx reg = regno_reg_rtx[i];
1997 XEXP (reg, 0) = addr;
1998 REG_USERVAR_P (reg) = 0;
ab1fd483 1999 MEM_IN_STRUCT_P (reg) = in_struct;
32131a9c
RK
2000 PUT_CODE (reg, MEM);
2001 }
2002 else if (reg_equiv_mem[i])
2003 XEXP (reg_equiv_mem[i], 0) = addr;
2004 }
2005 }
2006
2007#ifdef PRESERVE_DEATH_INFO_REGNO_P
2008 /* Make a pass over all the insns and remove death notes for things that
2009 are no longer registers or no longer die in the insn (e.g., an input
2010 and output pseudo being tied). */
2011
2012 for (insn = first; insn; insn = NEXT_INSN (insn))
2013 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2014 {
2015 rtx note, next;
2016
2017 for (note = REG_NOTES (insn); note; note = next)
2018 {
2019 next = XEXP (note, 1);
2020 if (REG_NOTE_KIND (note) == REG_DEAD
2021 && (GET_CODE (XEXP (note, 0)) != REG
2022 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2023 remove_note (insn, note);
2024 }
2025 }
2026#endif
2027
2028 /* Indicate that we no longer have known memory locations or constants. */
2029 reg_equiv_constant = 0;
2030 reg_equiv_memory_loc = 0;
5352b11a 2031
c8ab4464
RS
2032 if (scratch_list)
2033 free (scratch_list);
c307c237 2034 scratch_list = 0;
c8ab4464
RS
2035 if (scratch_block)
2036 free (scratch_block);
c307c237
RK
2037 scratch_block = 0;
2038
8b4f9969
JW
2039 CLEAR_HARD_REG_SET (used_spill_regs);
2040 for (i = 0; i < n_spills; i++)
2041 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2042
5352b11a 2043 return failure;
32131a9c
RK
2044}
2045\f
2046/* Nonzero if, after spilling reg REGNO for non-groups,
2047 it will still be possible to find a group if we still need one. */
2048
2049static int
2050possible_group_p (regno, max_groups)
2051 int regno;
2052 int *max_groups;
2053{
2054 int i;
2055 int class = (int) NO_REGS;
2056
2057 for (i = 0; i < (int) N_REG_CLASSES; i++)
2058 if (max_groups[i] > 0)
2059 {
2060 class = i;
2061 break;
2062 }
2063
2064 if (class == (int) NO_REGS)
2065 return 1;
2066
2067 /* Consider each pair of consecutive registers. */
2068 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2069 {
2070 /* Ignore pairs that include reg REGNO. */
2071 if (i == regno || i + 1 == regno)
2072 continue;
2073
2074 /* Ignore pairs that are outside the class that needs the group.
2075 ??? Here we fail to handle the case where two different classes
2076 independently need groups. But this never happens with our
2077 current machine descriptions. */
2078 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2079 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2080 continue;
2081
2082 /* A pair of consecutive regs we can still spill does the trick. */
2083 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2084 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2085 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2086 return 1;
2087
2088 /* A pair of one already spilled and one we can spill does it
2089 provided the one already spilled is not otherwise reserved. */
2090 if (spill_reg_order[i] < 0
2091 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2092 && spill_reg_order[i + 1] >= 0
2093 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2094 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2095 return 1;
2096 if (spill_reg_order[i + 1] < 0
2097 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2098 && spill_reg_order[i] >= 0
2099 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2100 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2101 return 1;
2102 }
2103
2104 return 0;
2105}
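/* (Editor's sketch: a stripped-down model of the decision above, not
   part of reload1.c.  The hard-reg-set macros and class/mode checks
   are replaced by plain char arrays; every name below is invented.
   spilled[], reserved[] and bad[] stand in for spill_reg_order,
   counted_for_groups/counted_for_nongroups and bad_spill_regs.)  */

static int
toy_group_still_possible (int regno, int nregs, const char *in_class,
                          const char *spilled, const char *reserved,
                          const char *bad)
{
  int i;

  for (i = 0; i < nregs - 1; i++)
    {
      if (i == regno || i + 1 == regno)
        continue;                       /* pair overlaps the reg we took */
      if (! in_class[i] || ! in_class[i + 1])
        continue;                       /* pair not usable for this class */

      /* Two registers that can still be spilled form a group.  */
      if (! spilled[i] && ! bad[i] && ! spilled[i + 1] && ! bad[i + 1])
        return 1;

      /* One spillable register next to an already-spilled one that is
         not otherwise reserved also forms a group.  */
      if (! spilled[i] && ! bad[i] && spilled[i + 1] && ! reserved[i + 1])
        return 1;
      if (! spilled[i + 1] && ! bad[i + 1] && spilled[i] && ! reserved[i])
        return 1;
    }
  return 0;
}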
2106\f
066aca28
RK
2107/* Count any groups of CLASS that can be formed from the registers recently
2108 spilled. */
32131a9c
RK
2109
2110static void
066aca28 2111count_possible_groups (group_size, group_mode, max_groups, class)
546b63fb 2112 int *group_size;
32131a9c 2113 enum machine_mode *group_mode;
546b63fb 2114 int *max_groups;
066aca28 2115 int class;
32131a9c 2116{
066aca28
RK
2117 HARD_REG_SET new;
2118 int i, j;
2119
32131a9c
RK
2120 /* Now find all consecutive groups of spilled registers
2121 and mark each group off against the need for such groups.
2122 But don't count them against ordinary need, yet. */
2123
066aca28
RK
2124 if (group_size[class] == 0)
2125 return;
2126
2127 CLEAR_HARD_REG_SET (new);
2128
2129 /* Make a mask of all the regs that are spill regs in class I. */
2130 for (i = 0; i < n_spills; i++)
2131 if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
2132 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
2133 && ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
2134 SET_HARD_REG_BIT (new, spill_regs[i]);
2135
2136 /* Find each consecutive group of them. */
2137 for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
2138 if (TEST_HARD_REG_BIT (new, i)
2139 && i + group_size[class] <= FIRST_PSEUDO_REGISTER
2140 && HARD_REGNO_MODE_OK (i, group_mode[class]))
32131a9c 2141 {
066aca28
RK
2142 for (j = 1; j < group_size[class]; j++)
2143 if (! TEST_HARD_REG_BIT (new, i + j))
2144 break;
32131a9c 2145
066aca28
RK
2146 if (j == group_size[class])
2147 {
2148 /* We found a group. Mark it off against this class's need for
2149 groups, and against each superclass too. */
2150 register enum reg_class *p;
2151
2152 max_groups[class]--;
2153 p = reg_class_superclasses[class];
2154 while (*p != LIM_REG_CLASSES)
d601d5da
JW
2155 {
2156 if (group_size [(int) *p] <= group_size [class])
2157 max_groups[(int) *p]--;
2158 p++;
2159 }
066aca28
RK
2160
2161 /* Don't count these registers again. */
46a70e45 2162 for (j = 0; j < group_size[class]; j++)
066aca28
RK
2163 SET_HARD_REG_BIT (counted_for_groups, i + j);
2164 }
2165
2166 /* Skip to the last reg in this group. When i is incremented above,
2167 it will then point to the first reg of the next possible group. */
2168 i += j - 1;
2169 }
32131a9c
RK
2170}
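/* (Editor's sketch: the run-finding loop above in isolation, not part
   of reload1.c.  The class and mode tests are stripped away; MASK is
   a plain byte-per-register bitmap and the function name is invented.
   It returns how many disjoint runs of GROUP_SIZE consecutive set
   entries it finds, skipping over each run the same way the loop
   above does.)  */

static int
toy_count_runs (const char *mask, int nregs, int group_size)
{
  int i, j, found = 0;

  for (i = 0; i + group_size <= nregs; i++)
    if (mask[i])
      {
        for (j = 1; j < group_size; j++)
          if (! mask[i + j])
            break;

        if (j == group_size)
          found++;

        /* Skip to the last reg examined; the loop increment then moves
           to the first reg of the next possible run.  */
        i += j - 1;
      }

  return found;
}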
2171\f
2172/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2173 another mode that needs to be reloaded for the same register class CLASS.
2174 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2175 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2176
2177 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2178 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2179 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2180 causes unnecessary failures on machines requiring alignment of register
2181 groups when the two modes are different sizes, because the larger mode has
2182 more strict alignment rules than the smaller mode. */
2183
2184static int
2185modes_equiv_for_class_p (allocate_mode, other_mode, class)
2186 enum machine_mode allocate_mode, other_mode;
2187 enum reg_class class;
2188{
2189 register int regno;
2190 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2191 {
2192 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2193 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2194 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2195 return 0;
2196 }
2197 return 1;
2198}
2199
5352b11a
RS
2200/* Handle the failure to find a register to spill.
2201 INSN should be one of the insns which needed this particular spill reg. */
2202
2203static void
2204spill_failure (insn)
2205 rtx insn;
2206{
2207 if (asm_noperands (PATTERN (insn)) >= 0)
2208 error_for_asm (insn, "`asm' needs too many reloads");
2209 else
a89b2cc4 2210 fatal_insn ("Unable to find a register to spill.", insn);
5352b11a
RS
2211}
2212
32131a9c
RK
2213/* Add a new register to the tables of available spill-registers
2214 (as well as spilling all pseudos allocated to the register).
2215 I is the index of this register in potential_reload_regs.
2216 CLASS is the regclass whose need is being satisfied.
2217 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2218 so that this register can count off against them.
2219 MAX_NONGROUPS is 0 if this register is part of a group.
2220 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2221
2222static int
2223new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2224 int i;
2225 int class;
2226 int *max_needs;
2227 int *max_nongroups;
2228 int global;
2229 FILE *dumpfile;
2230{
2231 register enum reg_class *p;
2232 int val;
2233 int regno = potential_reload_regs[i];
2234
2235 if (i >= FIRST_PSEUDO_REGISTER)
2236 abort (); /* Caller failed to find any register. */
2237
2238 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2239 fatal ("fixed or forbidden register was spilled.\n\
56f58d3a
RK
2240This may be due to a compiler bug or to impossible asm\n\
2241statements or clauses.");
32131a9c
RK
2242
2243 /* Make reg REGNO an additional reload reg. */
2244
2245 potential_reload_regs[i] = -1;
2246 spill_regs[n_spills] = regno;
2247 spill_reg_order[regno] = n_spills;
2248 if (dumpfile)
2249 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2250
2251 /* Clear off the needs we just satisfied. */
2252
2253 max_needs[class]--;
2254 p = reg_class_superclasses[class];
2255 while (*p != LIM_REG_CLASSES)
2256 max_needs[(int) *p++]--;
2257
2258 if (max_nongroups && max_nongroups[class] > 0)
2259 {
2260 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2261 max_nongroups[class]--;
2262 p = reg_class_superclasses[class];
2263 while (*p != LIM_REG_CLASSES)
2264 max_nongroups[(int) *p++]--;
2265 }
2266
2267 /* Spill every pseudo reg that was allocated to this reg
2268 or to something that overlaps this reg. */
2269
2270 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2271
2272 /* If there are some registers still to eliminate and this register
2273 wasn't ever used before, additional stack space may have to be
2274 allocated to store this register. Thus, we may have changed the offset
2275 between the stack and frame pointers, so mark that something has changed.
2276 (If new pseudos were spilled, thus requiring more space, VAL would have
2277 been set non-zero by the call to spill_hard_reg above since additional
2278 reloads may be needed in that case.)
2279
2280 One might think that we need only set VAL to 1 if this is a call-used
2281 register. However, the set of registers that must be saved by the
2282 prologue is not identical to the call-used set. For example, the
2283 register used by the call insn for the return PC is a call-used register,
2284 but must be saved by the prologue. */
2285 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2286 val = 1;
2287
2288 regs_ever_live[spill_regs[n_spills]] = 1;
2289 n_spills++;
2290
2291 return val;
2292}
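/* (Editor's sketch: the three-table update at the heart of the
   function above, not part of reload1.c.  CANDIDATES, SPILLS and
   ORDER are invented stand-ins for potential_reload_regs, spill_regs
   and spill_reg_order; the class bookkeeping and the actual spilling
   of pseudos are left out.)  */

static int
toy_take_spill_reg (int i, int *candidates, int *spills, int *order,
                    int n_spills)
{
  int regno = candidates[i];

  candidates[i] = -1;           /* no longer a candidate */
  spills[n_spills] = regno;     /* it is now spill reg number N_SPILLS */
  order[regno] = n_spills;
  return n_spills + 1;          /* caller's new n_spills */
}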
2293\f
2294/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2295 data that is dead in INSN. */
2296
2297static void
2298delete_dead_insn (insn)
2299 rtx insn;
2300{
2301 rtx prev = prev_real_insn (insn);
2302 rtx prev_dest;
2303
2304 /* If the previous insn sets a register that dies in our insn, delete it
2305 too. */
2306 if (prev && GET_CODE (PATTERN (prev)) == SET
2307 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2308 && reg_mentioned_p (prev_dest, PATTERN (insn))
2309 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2310 delete_dead_insn (prev);
2311
2312 PUT_CODE (insn, NOTE);
2313 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2314 NOTE_SOURCE_FILE (insn) = 0;
2315}
2316
2317/* Modify the home of pseudo-reg I.
2318 The new home is present in reg_renumber[I].
2319
2320 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2321 or it may be -1, meaning there is none or it is not relevant.
2322 This is used so that all pseudos spilled from a given hard reg
2323 can share one stack slot. */
2324
2325static void
2326alter_reg (i, from_reg)
2327 register int i;
2328 int from_reg;
2329{
2330 /* When outputting an inline function, this can happen
2331 for a reg that isn't actually used. */
2332 if (regno_reg_rtx[i] == 0)
2333 return;
2334
2335 /* If the reg got changed to a MEM at rtl-generation time,
2336 ignore it. */
2337 if (GET_CODE (regno_reg_rtx[i]) != REG)
2338 return;
2339
2340 /* Modify the reg-rtx to contain the new hard reg
2341 number or else to contain its pseudo reg number. */
2342 REGNO (regno_reg_rtx[i])
2343 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2344
2345 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2346 allocate a stack slot for it. */
2347
2348 if (reg_renumber[i] < 0
2349 && reg_n_refs[i] > 0
2350 && reg_equiv_constant[i] == 0
2351 && reg_equiv_memory_loc[i] == 0)
2352 {
2353 register rtx x;
2354 int inherent_size = PSEUDO_REGNO_BYTES (i);
2355 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2356 int adjust = 0;
2357
2358 /* Each pseudo reg has an inherent size which comes from its own mode,
2359 and a total size which provides room for paradoxical subregs
2360 which refer to the pseudo reg in wider modes.
2361
2362 We can use a slot already allocated if it provides both
2363 enough inherent space and enough total space.
2364 Otherwise, we allocate a new slot, making sure that it has no less
2365 inherent space, and no less total space, than the previous slot. */
2366 if (from_reg == -1)
2367 {
2368 /* No known place to spill from => no slot to reuse. */
2369 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
f76b9db2 2370 if (BYTES_BIG_ENDIAN)
02db8dd0
RK
2371 /* Cancel the big-endian correction done in assign_stack_local.
2372 Get the address of the beginning of the slot.
2373 This is so we can do a big-endian correction unconditionally
2374 below. */
2375 adjust = inherent_size - total_size;
2376
2377 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
32131a9c
RK
2378 }
2379 /* Reuse a stack slot if possible. */
2380 else if (spill_stack_slot[from_reg] != 0
2381 && spill_stack_slot_width[from_reg] >= total_size
2382 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2383 >= inherent_size))
2384 x = spill_stack_slot[from_reg];
2385 /* Allocate a bigger slot. */
2386 else
2387 {
2388 /* Compute maximum size needed, both for inherent size
2389 and for total size. */
2390 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
4f2d3674 2391 rtx stack_slot;
32131a9c
RK
2392 if (spill_stack_slot[from_reg])
2393 {
2394 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2395 > inherent_size)
2396 mode = GET_MODE (spill_stack_slot[from_reg]);
2397 if (spill_stack_slot_width[from_reg] > total_size)
2398 total_size = spill_stack_slot_width[from_reg];
2399 }
2400 /* Make a slot with that size. */
2401 x = assign_stack_local (mode, total_size, -1);
4f2d3674 2402 stack_slot = x;
f76b9db2
ILT
2403 if (BYTES_BIG_ENDIAN)
2404 {
2405 /* Cancel the big-endian correction done in assign_stack_local.
2406 Get the address of the beginning of the slot.
2407 This is so we can do a big-endian correction unconditionally
2408 below. */
2409 adjust = GET_MODE_SIZE (mode) - total_size;
4f2d3674 2410 if (adjust)
02db8dd0
RK
2411 stack_slot = gen_rtx (MEM, mode_for_size (total_size
2412 * BITS_PER_UNIT,
2413 MODE_INT, 1),
2414 plus_constant (XEXP (x, 0), adjust));
f76b9db2 2415 }
4f2d3674 2416 spill_stack_slot[from_reg] = stack_slot;
32131a9c
RK
2417 spill_stack_slot_width[from_reg] = total_size;
2418 }
2419
32131a9c
RK
2420 /* On a big endian machine, the "address" of the slot
2421 is the address of the low part that fits its inherent mode. */
f76b9db2 2422 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
32131a9c 2423 adjust += (total_size - inherent_size);
32131a9c
RK
2424
2425 /* If we have any adjustment to make, or if the stack slot is the
2426 wrong mode, make a new stack slot. */
2427 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2428 {
2429 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2430 plus_constant (XEXP (x, 0), adjust));
2431 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2432 }
2433
2434 /* Save the stack slot for later. */
2435 reg_equiv_memory_loc[i] = x;
2436 }
2437}
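/* (Editor's sketch: the two size decisions above in isolation, not
   part of reload1.c.  Sizes are in bytes and all names are invented;
   the "cancel the assign_stack_local correction" detail is left
   out.)  */

/* An existing slot can be shared only if it is big enough in both
   senses: wide enough overall and wide enough in its own mode.  */
static int
toy_can_reuse_slot (int old_total_width, int old_inherent_size,
                    int total_size, int inherent_size)
{
  return old_total_width >= total_size
         && old_inherent_size >= inherent_size;
}

/* On a big-endian target a value of INHERENT_SIZE bytes lives at the
   high-addressed end of a TOTAL_SIZE-byte slot, so its address is the
   slot start plus the difference; elsewhere it is the slot start.  */
static int
toy_low_part_offset (int big_endian, int inherent_size, int total_size)
{
  return (big_endian && inherent_size < total_size)
         ? total_size - inherent_size : 0;
}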
2438
2439/* Mark the slots in regs_ever_live for the hard regs
2440 used by pseudo-reg number REGNO. */
2441
2442void
2443mark_home_live (regno)
2444 int regno;
2445{
2446 register int i, lim;
2447 i = reg_renumber[regno];
2448 if (i < 0)
2449 return;
2450 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2451 while (i < lim)
2452 regs_ever_live[i++] = 1;
2453}
c307c237
RK
2454
2455/* Mark the registers used in SCRATCH as being live. */
2456
2457static void
2458mark_scratch_live (scratch)
2459 rtx scratch;
2460{
2461 register int i;
2462 int regno = REGNO (scratch);
2463 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2464
2465 for (i = regno; i < lim; i++)
2466 regs_ever_live[i] = 1;
2467}
32131a9c
RK
2468\f
2469/* This function handles the tracking of elimination offsets around branches.
2470
2471 X is a piece of RTL being scanned.
2472
2473 INSN is the insn that it came from, if any.
2474
2475 INITIAL_P is non-zero if we are to set the offset to be the initial
2476 offset and zero if we are setting the offset of the label to be the
2477 current offset. */
2478
2479static void
2480set_label_offsets (x, insn, initial_p)
2481 rtx x;
2482 rtx insn;
2483 int initial_p;
2484{
2485 enum rtx_code code = GET_CODE (x);
2486 rtx tem;
2487 int i;
2488 struct elim_table *p;
2489
2490 switch (code)
2491 {
2492 case LABEL_REF:
8be386d9
RS
2493 if (LABEL_REF_NONLOCAL_P (x))
2494 return;
2495
32131a9c
RK
2496 x = XEXP (x, 0);
2497
0f41302f 2498 /* ... fall through ... */
32131a9c
RK
2499
2500 case CODE_LABEL:
2501 /* If we know nothing about this label, set the desired offsets. Note
2502 that this sets the offset at a label to be the offset before a label
2503 if we don't know anything about the label. This is not correct for
2504 the label after a BARRIER, but is the best guess we can make. If
2505 we guessed wrong, we will suppress an elimination that might have
2506 been possible had we been able to guess correctly. */
2507
2508 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2509 {
2510 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2511 offsets_at[CODE_LABEL_NUMBER (x)][i]
2512 = (initial_p ? reg_eliminate[i].initial_offset
2513 : reg_eliminate[i].offset);
2514 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2515 }
2516
2517 /* Otherwise, if this is the definition of a label and it is
d45cf215 2518 preceded by a BARRIER, set our offsets to the known offset of
32131a9c
RK
2519 that label. */
2520
2521 else if (x == insn
2522 && (tem = prev_nonnote_insn (insn)) != 0
2523 && GET_CODE (tem) == BARRIER)
2a4b5f3b
RK
2524 {
2525 num_not_at_initial_offset = 0;
2526 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2527 {
2528 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2529 = offsets_at[CODE_LABEL_NUMBER (x)][i];
1d0d98f3
RK
2530 if (reg_eliminate[i].can_eliminate
2531 && (reg_eliminate[i].offset
2532 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
2533 num_not_at_initial_offset++;
2534 }
2535 }
32131a9c
RK
2536
2537 else
2538 /* If neither of the above cases is true, compare each offset
2539 with those previously recorded and suppress any eliminations
2540 where the offsets disagree. */
a8fdc208 2541
32131a9c
RK
2542 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2543 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2544 != (initial_p ? reg_eliminate[i].initial_offset
2545 : reg_eliminate[i].offset))
2546 reg_eliminate[i].can_eliminate = 0;
2547
2548 return;
2549
2550 case JUMP_INSN:
2551 set_label_offsets (PATTERN (insn), insn, initial_p);
2552
0f41302f 2553 /* ... fall through ... */
32131a9c
RK
2554
2555 case INSN:
2556 case CALL_INSN:
2557 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2558 and hence must have all eliminations at their initial offsets. */
2559 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2560 if (REG_NOTE_KIND (tem) == REG_LABEL)
2561 set_label_offsets (XEXP (tem, 0), insn, 1);
2562 return;
2563
2564 case ADDR_VEC:
2565 case ADDR_DIFF_VEC:
2566 /* Each of the labels in the address vector must be at their initial
2567 offsets. We want the first field for ADDR_VEC and the second
2568 field for ADDR_DIFF_VEC. */
2569
2570 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2571 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2572 insn, initial_p);
2573 return;
2574
2575 case SET:
2576 /* We only care about setting PC. If the source is not RETURN,
2577 IF_THEN_ELSE, or a label, disable any eliminations not at
2578 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2579 isn't one of those possibilities. For branches to a label,
2580 call ourselves recursively.
2581
2582 Note that this can disable elimination unnecessarily when we have
2583 a non-local goto since it will look like a non-constant jump to
2584 someplace in the current function. This isn't a significant
2585 problem since such jumps will normally be when all elimination
2586 pairs are back to their initial offsets. */
2587
2588 if (SET_DEST (x) != pc_rtx)
2589 return;
2590
2591 switch (GET_CODE (SET_SRC (x)))
2592 {
2593 case PC:
2594 case RETURN:
2595 return;
2596
2597 case LABEL_REF:
2598 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2599 return;
2600
2601 case IF_THEN_ELSE:
2602 tem = XEXP (SET_SRC (x), 1);
2603 if (GET_CODE (tem) == LABEL_REF)
2604 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2605 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2606 break;
2607
2608 tem = XEXP (SET_SRC (x), 2);
2609 if (GET_CODE (tem) == LABEL_REF)
2610 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2611 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2612 break;
2613 return;
2614 }
2615
2616 /* If we reach here, all eliminations must be at their initial
2617 offset because we are doing a jump to a variable address. */
2618 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2619 if (p->offset != p->initial_offset)
2620 p->can_eliminate = 0;
2621 }
2622}
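/* (Editor's sketch: the label-offset bookkeeping above with the rtl
   stripped away, not part of reload1.c.  TOY_N_ELIMS and all names
   are invented; OFFSET[] plays the role of offsets_at[] and KNOWN the
   role of offsets_known_at[].)  */

#define TOY_N_ELIMS 2

struct toy_label { int known; int offset[TOY_N_ELIMS]; };

static void
toy_note_label (struct toy_label *lab, const int *cur_offset,
                int *can_eliminate)
{
  int i;

  if (! lab->known)
    {
      /* First visit: record the offsets in force here.  */
      for (i = 0; i < TOY_N_ELIMS; i++)
        lab->offset[i] = cur_offset[i];
      lab->known = 1;
    }
  else
    /* Later visits must agree, or the elimination is unsafe: the
       label can be reached with two different offsets.  */
    for (i = 0; i < TOY_N_ELIMS; i++)
      if (lab->offset[i] != cur_offset[i])
        can_eliminate[i] = 0;
}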
2623\f
2624/* Used for communication between the next two functions to properly share
2625 the vector for an ASM_OPERANDS. */
2626
2627static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2628
a8fdc208 2629/* Scan X and replace any eliminable registers (such as fp) with a
32131a9c
RK
2630 replacement (such as sp), plus an offset.
2631
2632 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2633 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2634 MEM, we are allowed to replace a sum of a register and the constant zero
2635 with the register, which we cannot do outside a MEM. In addition, we need
2636 to record the fact that a register is referenced outside a MEM.
2637
ff32812a 2638 If INSN is an insn, it is the insn containing X. If we replace a REG
32131a9c
RK
2639 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2640 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2641 the REG is being modified.
2642
ff32812a
RS
2643 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2644 That's used when we eliminate in expressions stored in notes.
2645 This means, do not set ref_outside_mem even if the reference
2646 is outside of MEMs.
2647
32131a9c
RK
2648 If we see a modification to a register we know about, take the
2649 appropriate action (see case SET, below).
2650
2651 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2652 replacements done assuming all offsets are at their initial values. If
2653 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2654 encounter, return the actual location so that find_reloads will do
2655 the proper thing. */
2656
2657rtx
2658eliminate_regs (x, mem_mode, insn)
2659 rtx x;
2660 enum machine_mode mem_mode;
2661 rtx insn;
2662{
2663 enum rtx_code code = GET_CODE (x);
2664 struct elim_table *ep;
2665 int regno;
2666 rtx new;
2667 int i, j;
2668 char *fmt;
2669 int copied = 0;
2670
2671 switch (code)
2672 {
2673 case CONST_INT:
2674 case CONST_DOUBLE:
2675 case CONST:
2676 case SYMBOL_REF:
2677 case CODE_LABEL:
2678 case PC:
2679 case CC0:
2680 case ASM_INPUT:
2681 case ADDR_VEC:
2682 case ADDR_DIFF_VEC:
2683 case RETURN:
2684 return x;
2685
2686 case REG:
2687 regno = REGNO (x);
2688
2689 /* First handle the case where we encounter a bare register that
2690 is eliminable. Replace it with a PLUS. */
2691 if (regno < FIRST_PSEUDO_REGISTER)
2692 {
2693 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2694 ep++)
2695 if (ep->from_rtx == x && ep->can_eliminate)
2696 {
ff32812a
RS
2697 if (! mem_mode
2698 /* Refs inside notes don't count for this purpose. */
fe089a90 2699 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
ff32812a 2700 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2701 ep->ref_outside_mem = 1;
2702 return plus_constant (ep->to_rtx, ep->previous_offset);
2703 }
2704
2705 }
2706 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2707 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2708 {
2709 /* In this case, find_reloads would attempt to either use an
2710 incorrect address (if something is not at its initial offset)
2711 or substitute a replaced address into an insn (which loses
2712 if the offset is changed by some later action). So we simply
2713 return the replaced stack slot (assuming it is changed by
2714 elimination) and ignore the fact that this is actually a
2715 reference to the pseudo. Ensure we make a copy of the
2716 address in case it is shared. */
fb3821f7 2717 new = eliminate_regs (reg_equiv_memory_loc[regno],
e5687447 2718 mem_mode, insn);
32131a9c 2719 if (new != reg_equiv_memory_loc[regno])
208dffa5
RS
2720 {
2721 cannot_omit_stores[regno] = 1;
2722 return copy_rtx (new);
2723 }
32131a9c
RK
2724 }
2725 return x;
2726
2727 case PLUS:
2728 /* If this is the sum of an eliminable register and a constant, rework
2729 the sum. */
2730 if (GET_CODE (XEXP (x, 0)) == REG
2731 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2732 && CONSTANT_P (XEXP (x, 1)))
2733 {
2734 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2735 ep++)
2736 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2737 {
e5687447
JW
2738 if (! mem_mode
2739 /* Refs inside notes don't count for this purpose. */
2740 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2741 || GET_CODE (insn) == INSN_LIST)))
32131a9c
RK
2742 ep->ref_outside_mem = 1;
2743
2744 /* The only time we want to replace a PLUS with a REG (this
2745 occurs when the constant operand of the PLUS is the negative
2746 of the offset) is when we are inside a MEM. We won't want
2747 to do so at other times because that would change the
2748 structure of the insn in a way that reload can't handle.
2749 We special-case the commonest situation in
2750 eliminate_regs_in_insn, so just replace a PLUS with a
2751 PLUS here, unless inside a MEM. */
a23b64d5 2752 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
32131a9c
RK
2753 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2754 return ep->to_rtx;
2755 else
2756 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2757 plus_constant (XEXP (x, 1),
2758 ep->previous_offset));
2759 }
2760
2761 /* If the register is not eliminable, we are done since the other
2762 operand is a constant. */
2763 return x;
2764 }
2765
2766 /* If this is part of an address, we want to bring any constant to the
2767 outermost PLUS. We will do this by doing register replacement in
2768 our operands and seeing if a constant shows up in one of them.
2769
2770 We assume here this is part of an address (or a "load address" insn)
2771 since an eliminable register is not likely to appear in any other
2772 context.
2773
2774 If we have (plus (eliminable) (reg)), we want to produce
2775 (plus (plus (replacement) (reg)) (const)). If this was part of a
2776 normal add insn, (plus (replacement) (reg)) will be pushed as a
2777 reload. This is the desired action. */
2778
2779 {
e5687447
JW
2780 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2781 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
32131a9c
RK
2782
2783 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2784 {
2785 /* If one side is a PLUS and the other side is a pseudo that
a8fdc208 2786 didn't get a hard register but has a reg_equiv_constant,
32131a9c
RK
2787 we must replace the constant here since it may no longer
2788 be in the position of any operand. */
2789 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2790 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2791 && reg_renumber[REGNO (new1)] < 0
2792 && reg_equiv_constant != 0
2793 && reg_equiv_constant[REGNO (new1)] != 0)
2794 new1 = reg_equiv_constant[REGNO (new1)];
2795 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2796 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2797 && reg_renumber[REGNO (new0)] < 0
2798 && reg_equiv_constant[REGNO (new0)] != 0)
2799 new0 = reg_equiv_constant[REGNO (new0)];
2800
2801 new = form_sum (new0, new1);
2802
2803 /* As above, if we are not inside a MEM we do not want to
2804 turn a PLUS into something else. We might try to do so here
2805 for an addition of 0 if we aren't optimizing. */
2806 if (! mem_mode && GET_CODE (new) != PLUS)
2807 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2808 else
2809 return new;
2810 }
2811 }
2812 return x;
2813
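/* (Editor's note: illustrative example only, not part of reload1.c.
   Worked instance of the PLUS case above, assuming the frame pointer
   is being eliminated to the stack pointer with a previous offset of
   12:

     (plus (reg fp) (const_int 4))   ->  (plus (reg sp) (const_int 16))

   and, inside a MEM only, (plus (reg fp) (const_int -12)) collapses
   to plain (reg sp), since the constant exactly cancels the
   offset.  */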
981c7390
RK
2814 case MULT:
2815 /* If this is the product of an eliminable register and a
2816 constant, apply the distributive law and move the constant out
2817 so that we have (plus (mult ..) ..). This is needed in order
9faa82d8 2818 to keep load-address insns valid. This case is pathological.
981c7390
RK
2819 We ignore the possibility of overflow here. */
2820 if (GET_CODE (XEXP (x, 0)) == REG
2821 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2822 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2823 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2824 ep++)
2825 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2826 {
2827 if (! mem_mode
2828 /* Refs inside notes don't count for this purpose. */
2829 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2830 || GET_CODE (insn) == INSN_LIST)))
2831 ep->ref_outside_mem = 1;
2832
2833 return
2834 plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
2835 ep->previous_offset * INTVAL (XEXP (x, 1)));
2836 }
32131a9c 2837
0f41302f 2838 /* ... fall through ... */
32131a9c 2839
32131a9c
RK
2840 case CALL:
2841 case COMPARE:
2842 case MINUS:
32131a9c
RK
2843 case DIV: case UDIV:
2844 case MOD: case UMOD:
2845 case AND: case IOR: case XOR:
45620ed4
RK
2846 case ROTATERT: case ROTATE:
2847 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
32131a9c
RK
2848 case NE: case EQ:
2849 case GE: case GT: case GEU: case GTU:
2850 case LE: case LT: case LEU: case LTU:
2851 {
e5687447 2852 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
fb3821f7 2853 rtx new1
e5687447 2854 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
32131a9c
RK
2855
2856 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2857 return gen_rtx (code, GET_MODE (x), new0, new1);
2858 }
2859 return x;
2860
981c7390
RK
2861 case EXPR_LIST:
2862 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2863 if (XEXP (x, 0))
2864 {
2865 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2866 if (new != XEXP (x, 0))
2867 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2868 }
2869
0f41302f 2870 /* ... fall through ... */
981c7390
RK
2871
2872 case INSN_LIST:
2873 /* Now do eliminations in the rest of the chain. If this was
2874 an EXPR_LIST, this might result in allocating more memory than is
2875 strictly needed, but it simplifies the code. */
2876 if (XEXP (x, 1))
2877 {
2878 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2879 if (new != XEXP (x, 1))
2880 return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2881 }
2882 return x;
2883
32131a9c
RK
2884 case PRE_INC:
2885 case POST_INC:
2886 case PRE_DEC:
2887 case POST_DEC:
2888 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2889 if (ep->to_rtx == XEXP (x, 0))
2890 {
4c05b187
RK
2891 int size = GET_MODE_SIZE (mem_mode);
2892
2893 /* If more bytes than MEM_MODE are pushed, account for them. */
2894#ifdef PUSH_ROUNDING
2895 if (ep->to_rtx == stack_pointer_rtx)
2896 size = PUSH_ROUNDING (size);
2897#endif
32131a9c 2898 if (code == PRE_DEC || code == POST_DEC)
4c05b187 2899 ep->offset += size;
32131a9c 2900 else
4c05b187 2901 ep->offset -= size;
32131a9c
RK
2902 }
2903
2904 /* Fall through to generic unary operation case. */
32131a9c
RK
2905 case STRICT_LOW_PART:
2906 case NEG: case NOT:
2907 case SIGN_EXTEND: case ZERO_EXTEND:
2908 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2909 case FLOAT: case FIX:
2910 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2911 case ABS:
2912 case SQRT:
2913 case FFS:
e5687447 2914 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
32131a9c
RK
2915 if (new != XEXP (x, 0))
2916 return gen_rtx (code, GET_MODE (x), new);
2917 return x;
2918
2919 case SUBREG:
2920 /* Similar to above processing, but preserve SUBREG_WORD.
2921 Convert (subreg (mem)) to (mem) if not paradoxical.
2922 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2923 pseudo didn't get a hard reg, we must replace this with the
2924 eliminated version of the memory location because push_reloads
2925 may do the replacement in certain circumstances. */
2926 if (GET_CODE (SUBREG_REG (x)) == REG
2927 && (GET_MODE_SIZE (GET_MODE (x))
2928 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2929 && reg_equiv_memory_loc != 0
2930 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2931 {
2932 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
e5687447 2933 mem_mode, insn);
32131a9c
RK
2934
2935 /* If we didn't change anything, we must retain the pseudo. */
2936 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
59e2c378 2937 new = SUBREG_REG (x);
32131a9c 2938 else
59e2c378
RK
2939 {
2940 /* Otherwise, ensure NEW isn't shared in case we have to reload
2941 it. */
2942 new = copy_rtx (new);
2943
2944 /* In this case, we must show that the pseudo is used in this
2945 insn so that delete_output_reload will do the right thing. */
2946 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2947 && GET_CODE (insn) != INSN_LIST)
2948 emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
2949 insn);
2950 }
32131a9c
RK
2951 }
2952 else
e5687447 2953 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
32131a9c
RK
2954
2955 if (new != XEXP (x, 0))
2956 {
2957 if (GET_CODE (new) == MEM
2958 && (GET_MODE_SIZE (GET_MODE (x))
a3b75c07 2959 <= GET_MODE_SIZE (GET_MODE (new)))
e90d3cbb 2960#ifdef LOAD_EXTEND_OP
a3b75c07
RS
2961 /* On these machines we will be reloading what is
2962 inside the SUBREG if it originally was a pseudo and
2963 the inner and outer modes are both a word or
2964 smaller. So leave the SUBREG then. */
2965 && ! (GET_CODE (SUBREG_REG (x)) == REG
2966 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
fc4a0dca
JW
2967 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
2968 && (GET_MODE_SIZE (GET_MODE (x))
2969 > GET_MODE_SIZE (GET_MODE (new)))
2970 && INTEGRAL_MODE_P (GET_MODE (new))
2971 && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
a3b75c07
RS
2972#endif
2973 )
32131a9c
RK
2974 {
2975 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2976 enum machine_mode mode = GET_MODE (x);
2977
f76b9db2
ILT
2978 if (BYTES_BIG_ENDIAN)
2979 offset += (MIN (UNITS_PER_WORD,
2980 GET_MODE_SIZE (GET_MODE (new)))
2981 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
32131a9c
RK
2982
2983 PUT_MODE (new, mode);
2984 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2985 return new;
2986 }
2987 else
2988 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2989 }
2990
2991 return x;
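/* (Editor's note: illustrative example only, not part of reload1.c.
   Worked instance of the big-endian offset computation above, with
   assumed sizes: UNITS_PER_WORD = 4, outer mode 2 bytes, inner MEM
   4 bytes, SUBREG_WORD = 0.

     offset  = 0 * 4                    = 0
     offset += MIN (4, 4) - MIN (4, 2)  = 2

   so the narrow low part is addressed 2 bytes past the start of the
   wider slot.  */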
2992
94714ecc
RK
2993 case USE:
2994 /* If using a register that is the source of an elimination we still
2995 think can be performed, note it cannot be performed since we don't
2996 know how this register is used. */
2997 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2998 if (ep->from_rtx == XEXP (x, 0))
2999 ep->can_eliminate = 0;
3000
3001 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
3002 if (new != XEXP (x, 0))
3003 return gen_rtx (code, GET_MODE (x), new);
3004 return x;
3005
32131a9c
RK
3006 case CLOBBER:
3007 /* If clobbering a register that is the replacement register for an
d45cf215 3008 elimination we still think can be performed, note that it cannot
32131a9c
RK
3009 be performed. Otherwise, we need not be concerned about it. */
3010 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011 if (ep->to_rtx == XEXP (x, 0))
3012 ep->can_eliminate = 0;
3013
e5687447 3014 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2045084c
JVA
3015 if (new != XEXP (x, 0))
3016 return gen_rtx (code, GET_MODE (x), new);
32131a9c
RK
3017 return x;
3018
3019 case ASM_OPERANDS:
3020 {
3021 rtx *temp_vec;
3022 /* Properly handle sharing input and constraint vectors. */
3023 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
3024 {
3025 /* When we come to a new vector not seen before,
3026 scan all its elements; keep the old vector if none
3027 of them changes; otherwise, make a copy. */
3028 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
3029 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
3030 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3031 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
e5687447 3032 mem_mode, insn);
32131a9c
RK
3033
3034 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
3035 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
3036 break;
3037
3038 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
3039 new_asm_operands_vec = old_asm_operands_vec;
3040 else
3041 new_asm_operands_vec
3042 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
3043 }
3044
3045 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
3046 if (new_asm_operands_vec == old_asm_operands_vec)
3047 return x;
3048
3049 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3050 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3051 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
3052 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3053 ASM_OPERANDS_SOURCE_FILE (x),
3054 ASM_OPERANDS_SOURCE_LINE (x));
3055 new->volatil = x->volatil;
3056 return new;
3057 }
3058
3059 case SET:
3060 /* Check for setting a register that we know about. */
3061 if (GET_CODE (SET_DEST (x)) == REG)
3062 {
3063 /* See if this is setting the replacement register for an
a8fdc208 3064 elimination.
32131a9c 3065
3ec2ea3e
DE
3066 If DEST is the hard frame pointer, we do nothing because we
3067 assume that all assignments to the frame pointer are for
3068 non-local gotos and are being done at a time when they are valid
3069 and do not disturb anything else. Some machines want to
3070 eliminate a fake argument pointer (or even a fake frame pointer)
3071 with either the real frame or the stack pointer. Assignments to
3072 the hard frame pointer must not prevent this elimination. */
32131a9c
RK
3073
3074 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3075 ep++)
3076 if (ep->to_rtx == SET_DEST (x)
3ec2ea3e 3077 && SET_DEST (x) != hard_frame_pointer_rtx)
32131a9c 3078 {
6dc42e49 3079 /* If it is being incremented, adjust the offset. Otherwise,
32131a9c
RK
3080 this elimination can't be done. */
3081 rtx src = SET_SRC (x);
3082
3083 if (GET_CODE (src) == PLUS
3084 && XEXP (src, 0) == SET_DEST (x)
3085 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3086 ep->offset -= INTVAL (XEXP (src, 1));
3087 else
3088 ep->can_eliminate = 0;
3089 }
3090
3091 /* Now check to see we are assigning to a register that can be
3092 eliminated. If so, it must be as part of a PARALLEL, since we
3093 will not have been called if this is a single SET. So indicate
3094 that we can no longer eliminate this reg. */
3095 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3096 ep++)
3097 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3098 ep->can_eliminate = 0;
3099 }
3100
3101 /* Now avoid the loop below in this common case. */
3102 {
e5687447
JW
3103 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3104 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
32131a9c 3105
ff32812a 3106 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
32131a9c
RK
3107 write a CLOBBER insn. */
3108 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
572ca60a
RS
3109 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3110 && GET_CODE (insn) != INSN_LIST)
32131a9c
RK
3111 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3112
3113 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3114 return gen_rtx (SET, VOIDmode, new0, new1);
3115 }
3116
3117 return x;
3118
3119 case MEM:
3120 /* Our only special processing is to pass the mode of the MEM to our
3121 recursive call and copy the flags. While we are here, handle this
3122 case more efficiently. */
e5687447 3123 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
32131a9c
RK
3124 if (new != XEXP (x, 0))
3125 {
3126 new = gen_rtx (MEM, GET_MODE (x), new);
3127 new->volatil = x->volatil;
3128 new->unchanging = x->unchanging;
3129 new->in_struct = x->in_struct;
3130 return new;
3131 }
3132 else
3133 return x;
3134 }
3135
3136 /* Process each of our operands recursively. If any have changed, make a
3137 copy of the rtx. */
3138 fmt = GET_RTX_FORMAT (code);
3139 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3140 {
3141 if (*fmt == 'e')
3142 {
e5687447 3143 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
32131a9c
RK
3144 if (new != XEXP (x, i) && ! copied)
3145 {
3146 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3147 bcopy ((char *) x, (char *) new_x,
3148 (sizeof (*new_x) - sizeof (new_x->fld)
3149 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
32131a9c
RK
3150 x = new_x;
3151 copied = 1;
3152 }
3153 XEXP (x, i) = new;
3154 }
3155 else if (*fmt == 'E')
3156 {
3157 int copied_vec = 0;
3158 for (j = 0; j < XVECLEN (x, i); j++)
3159 {
3160 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3161 if (new != XVECEXP (x, i, j) && ! copied_vec)
3162 {
3163 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3164 &XVECEXP (x, i, 0));
3165 if (! copied)
3166 {
3167 rtx new_x = rtx_alloc (code);
4c9a05bc
RK
3168 bcopy ((char *) x, (char *) new_x,
3169 (sizeof (*new_x) - sizeof (new_x->fld)
3170 + (sizeof (new_x->fld[0])
3171 * GET_RTX_LENGTH (code))));
32131a9c
RK
3172 x = new_x;
3173 copied = 1;
3174 }
3175 XVEC (x, i) = new_v;
3176 copied_vec = 1;
3177 }
3178 XVECEXP (x, i, j) = new;
3179 }
3180 }
3181 }
3182
3183 return x;
3184}
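/* [Editor's note: illustrative sketch, not part of the original source.]
   The generic operand walk above copies X lazily: nothing is allocated
   until some sub-expression actually changes, and at most one copy is
   made per level.  The same copy-on-write idiom, restated over a
   hypothetical tree type with plain malloc instead of rtx_alloc/bcopy,
   is sketched below.  */
#if 0
#include <stdlib.h>
#include <string.h>

struct node { int code; int nkids; struct node *kid[4]; };

/* Apply FN to each child of N; copy N at most once, and only when some
   child is actually replaced.  Returns N itself when nothing changed.  */
static struct node *
rewrite_children (struct node *n, struct node *(*fn) (struct node *))
{
  struct node *result = n;
  int i;

  for (i = 0; i < n->nkids; i++)
    {
      struct node *new_kid = fn (n->kid[i]);

      if (new_kid != n->kid[i] && result == n)
        {
          /* First change seen: make the one and only copy.  */
          result = (struct node *) malloc (sizeof *result);
          memcpy (result, n, sizeof *result);
        }
      result->kid[i] = new_kid;
    }
  return result;
}
#endif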
3185\f
3186/* Scan INSN and eliminate all eliminable registers in it.
3187
3188 If REPLACE is nonzero, do the replacement destructively. Also
3189 delete the insn as dead if it is setting an eliminable register.
3190
3191 If REPLACE is zero, do all our allocations in reload_obstack.
3192
3193 If no eliminations were done and this insn doesn't require any elimination
3194 processing (these are not identical conditions: it might be updating sp,
3195 but not referencing fp; this needs to be seen during reload_as_needed so
3196 that the offset between fp and sp can be taken into consideration), zero
3197 is returned. Otherwise, 1 is returned. */
3198
3199static int
3200eliminate_regs_in_insn (insn, replace)
3201 rtx insn;
3202 int replace;
3203{
3204 rtx old_body = PATTERN (insn);
774672d2 3205 rtx old_set = single_set (insn);
32131a9c
RK
3206 rtx new_body;
3207 int val = 0;
3208 struct elim_table *ep;
3209
3210 if (! replace)
3211 push_obstacks (&reload_obstack, &reload_obstack);
3212
774672d2
RK
3213 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3214 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
32131a9c
RK
3215 {
3216 /* Check for setting an eliminable register. */
3217 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
774672d2 3218 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
32131a9c 3219 {
dd1eab0a
RK
3220#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3221 /* If this is setting the frame pointer register to the
3222 hardware frame pointer register and this is an elimination
3223 that will be done (tested above), this insn is really
3224 adjusting the frame pointer downward to compensate for
3225 the adjustment done before a nonlocal goto. */
3226 if (ep->from == FRAME_POINTER_REGNUM
3227 && ep->to == HARD_FRAME_POINTER_REGNUM)
3228 {
3229 rtx src = SET_SRC (old_set);
3230 int offset, ok = 0;
3231
3232 if (src == ep->to_rtx)
3233 offset = 0, ok = 1;
3234 else if (GET_CODE (src) == PLUS
3235 && GET_CODE (XEXP (src, 0)) == CONST_INT)
3236 offset = INTVAL (XEXP (src, 0)), ok = 1;
3237
3238 if (ok)
3239 {
3240 if (replace)
3241 {
3242 rtx src
3243 = plus_constant (ep->to_rtx, offset - ep->offset);
3244
3245 /* First see if this insn remains valid when we
3246 make the change. If not, keep the INSN_CODE
3247 the same and let reload fix it up. */
3248 validate_change (insn, &SET_SRC (old_set), src, 1);
3249 validate_change (insn, &SET_DEST (old_set),
3250 ep->to_rtx, 1);
3251 if (! apply_change_group ())
3252 {
3253 SET_SRC (old_set) = src;
3254 SET_DEST (old_set) = ep->to_rtx;
3255 }
3256 }
3257
3258 val = 1;
3259 goto done;
3260 }
3261 }
3262#endif
3263
32131a9c
RK
3264 /* In this case this insn isn't serving a useful purpose. We
3265 will delete it in reload_as_needed once we know that this
3266 elimination is, in fact, being done.
3267
abc95ed3 3268 If REPLACE isn't set, we can't delete this insn, but needn't
32131a9c
RK
3269 process it since it won't be used unless something changes. */
3270 if (replace)
3271 delete_dead_insn (insn);
3272 val = 1;
3273 goto done;
3274 }
3275
3276 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3277 in the insn is the negative of the offset in FROM. Substitute
3278 (set (reg) (reg to)) for the insn and change its code.
3279
3280 We have to do this here, rather than in eliminate_regs, so that we can
3281 change the insn code. */
3282
774672d2
RK
3283 if (GET_CODE (SET_SRC (old_set)) == PLUS
3284 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3285 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
32131a9c
RK
3286 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3287 ep++)
774672d2 3288 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
922d9d40 3289 && ep->can_eliminate)
32131a9c 3290 {
922d9d40
RK
3291 /* We must stop at the first elimination that will be used.
3292 If this one would replace the PLUS with a REG, do it
3293 now. Otherwise, quit the loop and let eliminate_regs
3294 do its normal replacement. */
774672d2 3295 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
922d9d40 3296 {
774672d2
RK
3297 /* We assume here that we don't need a PARALLEL of
3298 any CLOBBERs for this assignment. There's not
3299 much we can do if we do need it. */
922d9d40 3300 PATTERN (insn) = gen_rtx (SET, VOIDmode,
774672d2 3301 SET_DEST (old_set), ep->to_rtx);
922d9d40
RK
3302 INSN_CODE (insn) = -1;
3303 val = 1;
3304 goto done;
3305 }
3306
3307 break;
32131a9c
RK
3308 }
3309 }
3310
3311 old_asm_operands_vec = 0;
3312
3313 /* Replace the body of this insn with a substituted form. If we changed
05b4c365 3314 something, return non-zero.
32131a9c
RK
3315
3316 If we are replacing a body that was a (set X (plus Y Z)), try to
3317 re-recognize the insn. We do this in case we had a simple addition
3318 but now can do this as a load-address. This saves an insn in this
0f41302f 3319 common case. */
32131a9c 3320
fb3821f7 3321 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
32131a9c
RK
3322 if (new_body != old_body)
3323 {
7c791b13
RK
3324 /* If we aren't replacing things permanently and we changed something,
3325 make another copy to ensure that all the RTL is new. Otherwise
3326 things can go wrong if find_reloads swaps commutative operands
0f41302f 3327 and one is inside RTL that has been copied while the other is not. */
7c791b13 3328
4d411872
RS
3329 /* Don't copy an asm_operands because (1) there's no need and (2)
3330 copy_rtx can't do it properly when there are multiple outputs. */
b84f9d9c 3331 if (! replace && asm_noperands (old_body) < 0)
7c791b13
RK
3332 new_body = copy_rtx (new_body);
3333
774672d2
RK
3334 /* If we had a move insn but now we don't, rerecognize it. This will
3335 cause spurious re-recognition if the old move had a PARALLEL since
3336 the new one still will, but we can't call single_set without
3337 having put NEW_BODY into the insn and the re-recognition won't
3338 hurt in this rare case. */
3339 if (old_set != 0
3340 && ((GET_CODE (SET_SRC (old_set)) == REG
3341 && (GET_CODE (new_body) != SET
3342 || GET_CODE (SET_SRC (new_body)) != REG))
3343 /* If this was a load from or store to memory, compare
3344 the MEM in recog_operand to the one in the insn. If they
3345 are not equal, then rerecognize the insn. */
3346 || (old_set != 0
3347 && ((GET_CODE (SET_SRC (old_set)) == MEM
3348 && SET_SRC (old_set) != recog_operand[1])
3349 || (GET_CODE (SET_DEST (old_set)) == MEM
3350 && SET_DEST (old_set) != recog_operand[0])))
3351 /* If this was an add insn before, rerecognize. */
3352 || GET_CODE (SET_SRC (old_set)) == PLUS))
4a5d0fb5
RS
3353 {
3354 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
0ba846c7
RS
3355 /* If recognition fails, store the new body anyway.
3356 It's normal to have recognition failures here
3357 due to bizarre memory addresses; reloading will fix them. */
3358 PATTERN (insn) = new_body;
4a5d0fb5 3359 }
0ba846c7 3360 else
32131a9c
RK
3361 PATTERN (insn) = new_body;
3362
32131a9c
RK
3363 val = 1;
3364 }
a8fdc208 3365
32131a9c
RK
3366 /* Loop through all elimination pairs. See if any have changed and
3367 recalculate the number not at initial offset.
3368
a8efe40d
RK
3369 Compute the maximum offset (minimum offset if the stack does not
3370 grow downward) for each elimination pair.
3371
32131a9c
RK
3372 We also detect cases where register elimination cannot be done,
3373 namely, if a register would be both changed and referenced outside a MEM
3374 in the resulting insn since such an insn is often undefined and, even if
3375 not, we cannot know what meaning will be given to it. Note that it is
3376 valid to have a register used in an address in an insn that changes it
3377 (presumably with a pre- or post-increment or decrement).
3378
3379 If anything changes, return nonzero. */
3380
3381 num_not_at_initial_offset = 0;
3382 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3383 {
3384 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3385 ep->can_eliminate = 0;
3386
3387 ep->ref_outside_mem = 0;
3388
3389 if (ep->previous_offset != ep->offset)
3390 val = 1;
3391
3392 ep->previous_offset = ep->offset;
3393 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3394 num_not_at_initial_offset++;
a8efe40d
RK
3395
3396#ifdef STACK_GROWS_DOWNWARD
3397 ep->max_offset = MAX (ep->max_offset, ep->offset);
3398#else
3399 ep->max_offset = MIN (ep->max_offset, ep->offset);
3400#endif
32131a9c
RK
3401 }
3402
3403 done:
9faa82d8 3404 /* If we changed something, perform elimination in REG_NOTES. This is
05b4c365
RK
3405 needed even when REPLACE is zero because a REG_DEAD note might refer
3406 to a register that we eliminate and could cause a different number
3407 of spill registers to be needed in the final reload pass than in
3408 the pre-passes. */
20748cab 3409 if (val && REG_NOTES (insn) != 0)
ff32812a 3410 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
05b4c365 3411
32131a9c
RK
3412 if (! replace)
3413 pop_obstacks ();
3414
3415 return val;
3416}
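/* [Editor's note: illustrative sketch, not part of the original source.]
   The PLUS special case above is plain arithmetic: an elimination says
   FROM may be replaced by TO + offset, so FROM + c rewrites to
   TO + (offset + c), and when c == -offset the addition vanishes and
   the insn becomes a simple register copy.  Restated over ordinary
   integers (hypothetical helper, not a GCC interface):  */
#if 0
/* Displacement left over after replacing FROM by TO + OFFSET in the
   expression FROM + C.  Zero means the insn collapses to (set dest to).  */
static long
displacement_after_elimination (long offset, long c)
{
  return offset + c;
}
#endif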
3417
3418/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3419 replacement we currently believe is valid, mark it as not eliminable if X
3420 modifies DEST in any way other than by adding a constant integer to it.
3421
3422 If DEST is the frame pointer, we do nothing because we assume that
3ec2ea3e
DE
3423 all assignments to the hard frame pointer are nonlocal gotos and are being
3424 done at a time when they are valid and do not disturb anything else.
32131a9c 3425 Some machines want to eliminate a fake argument pointer with either the
3ec2ea3e
DE
3426 frame or stack pointer. Assignments to the hard frame pointer must not
3427 prevent this elimination.
32131a9c
RK
3428
3429 Called via note_stores from reload before starting its passes to scan
3430 the insns of the function. */
3431
3432static void
3433mark_not_eliminable (dest, x)
3434 rtx dest;
3435 rtx x;
3436{
3437 register int i;
3438
3439 /* A SUBREG of a hard register here is just changing its mode. We should
3440 not see a SUBREG of an eliminable hard register, but check just in
3441 case. */
3442 if (GET_CODE (dest) == SUBREG)
3443 dest = SUBREG_REG (dest);
3444
3ec2ea3e 3445 if (dest == hard_frame_pointer_rtx)
32131a9c
RK
3446 return;
3447
3448 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3449 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3450 && (GET_CODE (x) != SET
3451 || GET_CODE (SET_SRC (x)) != PLUS
3452 || XEXP (SET_SRC (x), 0) != dest
3453 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3454 {
3455 reg_eliminate[i].can_eliminate_previous
3456 = reg_eliminate[i].can_eliminate = 0;
3457 num_eliminable--;
3458 }
3459}
3460\f
3461/* Kick all pseudos out of hard register REGNO.
3462 If GLOBAL is nonzero, try to find someplace else to put them.
3463 If DUMPFILE is nonzero, log actions taken on that file.
3464
3465 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3466 because we found we can't eliminate some register. In this case, no pseudos
3467 are allowed to be in the register, even if they are only in a block that
3468 doesn't require spill registers, unlike the case when we are spilling this
3469 hard reg to produce another spill register.
3470
3471 Return nonzero if any pseudos needed to be kicked out. */
3472
3473static int
3474spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3475 register int regno;
3476 int global;
3477 FILE *dumpfile;
3478 int cant_eliminate;
3479{
c307c237 3480 enum reg_class class = REGNO_REG_CLASS (regno);
32131a9c
RK
3481 int something_changed = 0;
3482 register int i;
3483
3484 SET_HARD_REG_BIT (forbidden_regs, regno);
3485
9ff3516a
RK
3486 if (cant_eliminate)
3487 regs_ever_live[regno] = 1;
3488
32131a9c
RK
3489 /* Spill every pseudo reg that was allocated to this reg
3490 or to something that overlaps this reg. */
3491
3492 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3493 if (reg_renumber[i] >= 0
3494 && reg_renumber[i] <= regno
a8fdc208 3495 && (reg_renumber[i]
32131a9c
RK
3496 + HARD_REGNO_NREGS (reg_renumber[i],
3497 PSEUDO_REGNO_MODE (i))
3498 > regno))
3499 {
32131a9c
RK
3500 /* If this register belongs solely to a basic block which needed no
3501 spilling of any class that this register is contained in,
3502 leave it be, unless we are spilling this register because
3503 it was a hard register that can't be eliminated. */
3504
3505 if (! cant_eliminate
3506 && basic_block_needs[0]
3507 && reg_basic_block[i] >= 0
3508 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3509 {
3510 enum reg_class *p;
3511
3512 for (p = reg_class_superclasses[(int) class];
3513 *p != LIM_REG_CLASSES; p++)
3514 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3515 break;
a8fdc208 3516
32131a9c
RK
3517 if (*p == LIM_REG_CLASSES)
3518 continue;
3519 }
3520
3521 /* Mark it as no longer having a hard register home. */
3522 reg_renumber[i] = -1;
3523 /* We will need to scan everything again. */
3524 something_changed = 1;
3525 if (global)
2c5d9e37 3526 retry_global_alloc (i, forbidden_regs);
32131a9c
RK
3527
3528 alter_reg (i, regno);
3529 if (dumpfile)
3530 {
3531 if (reg_renumber[i] == -1)
3532 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3533 else
3534 fprintf (dumpfile, " Register %d now in %d.\n\n",
3535 i, reg_renumber[i]);
3536 }
3537 }
c307c237
RK
3538 for (i = 0; i < scratch_list_length; i++)
3539 {
3540 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3541 {
3542 if (! cant_eliminate && basic_block_needs[0]
3543 && ! basic_block_needs[(int) class][scratch_block[i]])
3544 {
3545 enum reg_class *p;
3546
3547 for (p = reg_class_superclasses[(int) class];
3548 *p != LIM_REG_CLASSES; p++)
3549 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3550 break;
3551
3552 if (*p == LIM_REG_CLASSES)
3553 continue;
3554 }
3555 PUT_CODE (scratch_list[i], SCRATCH);
3556 scratch_list[i] = 0;
3557 something_changed = 1;
3558 continue;
3559 }
3560 }
32131a9c
RK
3561
3562 return something_changed;
3563}
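/* [Editor's note: illustrative sketch, not part of the original source.]
   The spill loop above treats a pseudo's assignment as the interval
   [first, first + nregs) of hard registers and kicks the pseudo out
   whenever that interval contains REGNO.  The containment test on its
   own (hypothetical helper):  */
#if 0
static int
assignment_covers_hard_reg (int first, int nregs, int regno)
{
  return first >= 0 && first <= regno && regno < first + nregs;
}
#endif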
3564\f
56f58d3a
RK
3565/* Find all paradoxical subregs within X and update reg_max_ref_width.
3566 Also mark any hard registers used to store user variables as
3567 forbidden from being used for spill registers. */
32131a9c
RK
3568
3569static void
3570scan_paradoxical_subregs (x)
3571 register rtx x;
3572{
3573 register int i;
3574 register char *fmt;
3575 register enum rtx_code code = GET_CODE (x);
3576
3577 switch (code)
3578 {
56f58d3a
RK
3579 case REG:
3580#ifdef SMALL_REGISTER_CLASSES
3581 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3582 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3583#endif
3584 return;
3585
32131a9c
RK
3586 case CONST_INT:
3587 case CONST:
3588 case SYMBOL_REF:
3589 case LABEL_REF:
3590 case CONST_DOUBLE:
3591 case CC0:
3592 case PC:
32131a9c
RK
3593 case USE:
3594 case CLOBBER:
3595 return;
3596
3597 case SUBREG:
3598 if (GET_CODE (SUBREG_REG (x)) == REG
3599 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3600 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3601 = GET_MODE_SIZE (GET_MODE (x));
3602 return;
3603 }
3604
3605 fmt = GET_RTX_FORMAT (code);
3606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3607 {
3608 if (fmt[i] == 'e')
3609 scan_paradoxical_subregs (XEXP (x, i));
3610 else if (fmt[i] == 'E')
3611 {
3612 register int j;
3613 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3614 scan_paradoxical_subregs (XVECEXP (x, i, j));
3615 }
3616 }
3617}
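/* [Editor's note: illustrative sketch, not part of the original source.]
   A SUBREG is "paradoxical" when the outer mode is wider than the mode
   of the inner register, e.g. (subreg:SI (reg:QI 101) 0) on a target
   where QImode is 1 byte and SImode is 4.  The routine above records
   the widest such reference so that the pseudo's spill slot can later
   be made wide enough.  The bookkeeping, stripped of rtl (hypothetical
   table and bound):  */
#if 0
#define MAX_PSEUDOS 1024                /* arbitrary bound, sketch only */
static unsigned int max_ref_width[MAX_PSEUDOS];

static void
note_paradoxical_reference (int pseudo, unsigned int outer_size,
                            unsigned int inner_size)
{
  if (outer_size > inner_size && outer_size > max_ref_width[pseudo])
    max_ref_width[pseudo] = outer_size;
}
#endif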
3618\f
32131a9c
RK
3619static int
3620hard_reg_use_compare (p1, p2)
3621 struct hard_reg_n_uses *p1, *p2;
3622{
3623 int tem = p1->uses - p2->uses;
3624 if (tem != 0) return tem;
3625 /* If regs are equally good, sort by regno,
3626 so that the results of qsort leave nothing to chance. */
3627 return p1->regno - p2->regno;
3628}
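/* [Editor's note: illustrative sketch, not part of the original source.]
   Both qsort comparators in this file finish with a tie-break on the
   register (or reload) number, so elements that compare equal on the
   primary key still end up in a fixed order no matter how the C
   library's qsort arranges equal keys.  A standalone version of the
   idiom, using the standard comparator signature:  */
#if 0
#include <stdlib.h>

struct cost { int uses; int regno; };

static int
cost_compare (const void *a, const void *b)
{
  const struct cost *p1 = (const struct cost *) a;
  const struct cost *p2 = (const struct cost *) b;

  if (p1->uses != p2->uses)
    return p1->uses - p2->uses;         /* primary key */
  return p1->regno - p2->regno;         /* deterministic tie-break */
}

/* Usage: qsort (table, n, sizeof table[0], cost_compare);  */
#endif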
3629
3630/* Choose the order to consider regs for use as reload registers
3631 based on how much trouble would be caused by spilling one.
3632 Store them in order of decreasing preference in potential_reload_regs. */
3633
3634static void
2c5d9e37
RK
3635order_regs_for_reload (global)
3636 int global;
32131a9c
RK
3637{
3638 register int i;
3639 register int o = 0;
3640 int large = 0;
3641
3642 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3643
3644 CLEAR_HARD_REG_SET (bad_spill_regs);
3645
3646 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3647 potential_reload_regs[i] = -1;
3648
3649 /* Count number of uses of each hard reg by pseudo regs allocated to it
3650 and then order them by decreasing use. */
3651
3652 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3653 {
3654 hard_reg_n_uses[i].uses = 0;
3655 hard_reg_n_uses[i].regno = i;
3656 }
3657
3658 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3659 {
3660 int regno = reg_renumber[i];
3661 if (regno >= 0)
3662 {
3663 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3664 while (regno < lim)
2c5d9e37
RK
3665 {
3666 /* If allocated by local-alloc, show more uses since
3667 we're not going to be able to reallocate it, but
3668 we might if allocated by global alloc. */
3669 if (global && reg_allocno[i] < 0)
3670 hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;
3671
3672 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3673 }
32131a9c
RK
3674 }
3675 large += reg_n_refs[i];
3676 }
3677
3678 /* Now fixed registers (which cannot safely be used for reloading)
3679 get a very high use count so they will be considered least desirable.
3680 Registers used explicitly in the rtl code are almost as bad. */
3681
3682 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3683 {
3684 if (fixed_regs[i])
3685 {
3686 hard_reg_n_uses[i].uses += 2 * large + 2;
3687 SET_HARD_REG_BIT (bad_spill_regs, i);
3688 }
3689 else if (regs_explicitly_used[i])
3690 {
3691 hard_reg_n_uses[i].uses += large + 1;
546b63fb 3692#ifndef SMALL_REGISTER_CLASSES
32131a9c
RK
3693 /* ??? We are doing this here because of the potential that
3694 bad code may be generated if a register explicitly used in
3695 an insn was used as a spill register for that insn. But
3696 not using these as spill registers may lose on some machines.
3697 We'll have to see how this works out. */
3698 SET_HARD_REG_BIT (bad_spill_regs, i);
546b63fb 3699#endif
32131a9c
RK
3700 }
3701 }
3ec2ea3e
DE
3702 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3703 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
32131a9c
RK
3704
3705#ifdef ELIMINABLE_REGS
3706 /* If registers other than the frame pointer are eliminable, mark them as
3707 poor choices. */
3708 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3709 {
3710 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3711 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3712 }
3713#endif
3714
3715 /* Prefer registers not so far used, for use in temporary loading.
3716 Among them, if REG_ALLOC_ORDER is defined, use that order.
3717 Otherwise, prefer registers not preserved by calls. */
3718
3719#ifdef REG_ALLOC_ORDER
3720 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3721 {
3722 int regno = reg_alloc_order[i];
3723
3724 if (hard_reg_n_uses[regno].uses == 0)
3725 potential_reload_regs[o++] = regno;
3726 }
3727#else
3728 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3729 {
3730 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3731 potential_reload_regs[o++] = i;
3732 }
3733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3734 {
3735 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3736 potential_reload_regs[o++] = i;
3737 }
3738#endif
3739
3740 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3741 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3742
3743 /* Now add the regs that are already used,
3744 preferring those used less often. The fixed and otherwise forbidden
3745 registers will be at the end of this list. */
3746
3747 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3748 if (hard_reg_n_uses[i].uses != 0)
3749 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3750}
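/* [Editor's note: illustrative sketch, not part of the original source.]
   The penalties above are expressed in terms of LARGE, the sum of all
   pseudo reference counts, so the three bands can never interleave: a
   register scoring only pseudo uses stays at or below LARGE, an
   explicitly used register scores at least LARGE + 1, and a fixed or
   eliminable register at least 2*LARGE + 2.  A check of that claim
   (hypothetical helper):  */
#if 0
#include <assert.h>

static void
check_bands (int large, int uses_a, int uses_b)
{
  /* Each use count comes from pseudo references, so it cannot exceed LARGE.  */
  assert (0 <= uses_a && uses_a <= large);
  assert (0 <= uses_b && uses_b <= large);

  /* Ordinary reg < explicitly used reg < fixed/eliminable reg.  */
  assert (uses_a < uses_b + large + 1);
  assert (uses_b + large + 1 < 2 * large + 2);
}
#endif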
3751\f
a5339699 3752/* Used in reload_as_needed to sort the spilled regs. */
2f23a46d 3753
a5339699
RK
3754static int
3755compare_spill_regs (r1, r2)
3756 short *r1, *r2;
3757{
2f23a46d 3758 return *r1 - *r2;
a5339699
RK
3759}
3760
32131a9c
RK
3761/* Reload pseudo-registers into hard regs around each insn as needed.
3762 Additional register load insns are output before the insn that needs it
3763 and perhaps store insns after insns that modify the reloaded pseudo reg.
3764
3765 reg_last_reload_reg and reg_reloaded_contents keep track of
d08ea79f 3766 which registers are already available in reload registers.
32131a9c
RK
3767 We update these for the reloads that we perform,
3768 as the insns are scanned. */
3769
3770static void
3771reload_as_needed (first, live_known)
3772 rtx first;
3773 int live_known;
3774{
3775 register rtx insn;
3776 register int i;
3777 int this_block = 0;
3778 rtx x;
3779 rtx after_call = 0;
3780
4c9a05bc
RK
3781 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
3782 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
32131a9c 3783 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4c9a05bc 3784 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
32131a9c
RK
3785 reg_has_output_reload = (char *) alloca (max_regno);
3786 for (i = 0; i < n_spills; i++)
3787 {
3788 reg_reloaded_contents[i] = -1;
3789 reg_reloaded_insn[i] = 0;
3790 }
3791
3792 /* Reset all offsets on eliminable registers to their initial values. */
3793#ifdef ELIMINABLE_REGS
3794 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3795 {
3796 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
510dd77e 3797 reg_eliminate[i].initial_offset);
32131a9c
RK
3798 reg_eliminate[i].previous_offset
3799 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3800 }
3801#else
3802 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3803 reg_eliminate[0].previous_offset
3804 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3805#endif
3806
3807 num_not_at_initial_offset = 0;
3808
a5339699
RK
3809 /* Order the spilled regs, so that allocate_reload_regs can guarantee to
3810 pack registers with group needs. */
3811 if (n_spills > 1)
5f40cc2d
RK
3812 {
3813 qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
3814 for (i = 0; i < n_spills; i++)
3815 spill_reg_order[spill_regs[i]] = i;
3816 }
a5339699 3817
32131a9c
RK
3818 for (insn = first; insn;)
3819 {
3820 register rtx next = NEXT_INSN (insn);
3821
3822 /* Notice when we move to a new basic block. */
aa2c50d6 3823 if (live_known && this_block + 1 < n_basic_blocks
32131a9c
RK
3824 && insn == basic_block_head[this_block+1])
3825 ++this_block;
3826
3827 /* If we pass a label, copy the offsets from the label information
3828 into the current offsets of each elimination. */
3829 if (GET_CODE (insn) == CODE_LABEL)
2a4b5f3b
RK
3830 {
3831 num_not_at_initial_offset = 0;
3832 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3833 {
3834 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3835 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
1d0d98f3
RK
3836 if (reg_eliminate[i].can_eliminate
3837 && (reg_eliminate[i].offset
3838 != reg_eliminate[i].initial_offset))
2a4b5f3b
RK
3839 num_not_at_initial_offset++;
3840 }
3841 }
32131a9c
RK
3842
3843 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3844 {
3845 rtx avoid_return_reg = 0;
0639444f 3846 rtx oldpat = PATTERN (insn);
32131a9c
RK
3847
3848#ifdef SMALL_REGISTER_CLASSES
3849 /* Set avoid_return_reg if this is an insn
3850 that might use the value of a function call. */
3851 if (GET_CODE (insn) == CALL_INSN)
3852 {
3853 if (GET_CODE (PATTERN (insn)) == SET)
3854 after_call = SET_DEST (PATTERN (insn));
3855 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3856 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3857 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3858 else
3859 after_call = 0;
3860 }
3861 else if (after_call != 0
3862 && !(GET_CODE (PATTERN (insn)) == SET
3863 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3864 {
2b979c57 3865 if (reg_referenced_p (after_call, PATTERN (insn)))
32131a9c
RK
3866 avoid_return_reg = after_call;
3867 after_call = 0;
3868 }
3869#endif /* SMALL_REGISTER_CLASSES */
3870
2758481d
RS
3871 /* If this is a USE or CLOBBER of a MEM, ensure that any
3872 references to eliminable registers have been removed. */
3873
3874 if ((GET_CODE (PATTERN (insn)) == USE
3875 || GET_CODE (PATTERN (insn)) == CLOBBER)
3876 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3877 XEXP (XEXP (PATTERN (insn), 0), 0)
3878 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
fb3821f7 3879 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
2758481d 3880
32131a9c
RK
3881 /* If we need to do register elimination processing, do so.
3882 This might delete the insn, in which case we are done. */
3883 if (num_eliminable && GET_MODE (insn) == QImode)
3884 {
3885 eliminate_regs_in_insn (insn, 1);
3886 if (GET_CODE (insn) == NOTE)
3887 {
3888 insn = next;
3889 continue;
3890 }
3891 }
3892
3893 if (GET_MODE (insn) == VOIDmode)
3894 n_reloads = 0;
3895 /* First find the pseudo regs that must be reloaded for this insn.
3896 This info is returned in the tables reload_... (see reload.h).
3897 Also modify the body of INSN by substituting RELOAD
3898 rtx's for those pseudo regs. */
3899 else
3900 {
3901 bzero (reg_has_output_reload, max_regno);
3902 CLEAR_HARD_REG_SET (reg_is_output_reload);
3903
3904 find_reloads (insn, 1, spill_indirect_levels, live_known,
3905 spill_reg_order);
3906 }
3907
3908 if (n_reloads > 0)
3909 {
3c3eeea6
RK
3910 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3911 rtx p;
32131a9c
RK
3912 int class;
3913
3914 /* If this block has not had spilling done for a
546b63fb
RK
3915 particular class and we have any non-optionals that need a
3916 spill reg in that class, abort. */
32131a9c
RK
3917
3918 for (class = 0; class < N_REG_CLASSES; class++)
3919 if (basic_block_needs[class] != 0
3920 && basic_block_needs[class][this_block] == 0)
3921 for (i = 0; i < n_reloads; i++)
546b63fb
RK
3922 if (class == (int) reload_reg_class[i]
3923 && reload_reg_rtx[i] == 0
3924 && ! reload_optional[i]
3925 && (reload_in[i] != 0 || reload_out[i] != 0
3926 || reload_secondary_p[i] != 0))
a89b2cc4 3927 fatal_insn ("Non-optional registers need a spill register", insn);
32131a9c
RK
3928
3929 /* Now compute which reload regs to reload them into. Perhaps
3930 reusing reload regs from previous insns, or else output
3931 load insns to reload them. Maybe output store insns too.
3932 Record the choices of reload reg in reload_reg_rtx. */
3933 choose_reload_regs (insn, avoid_return_reg);
3934
546b63fb
RK
3935#ifdef SMALL_REGISTER_CLASSES
3936 /* Merge any reloads that we didn't combine for fear of
3937 increasing the number of spill registers needed but now
3938 discover can be safely merged. */
3939 merge_assigned_reloads (insn);
3940#endif
3941
32131a9c
RK
3942 /* Generate the insns to reload operands into or out of
3943 their reload regs. */
3944 emit_reload_insns (insn);
3945
3946 /* Substitute the chosen reload regs from reload_reg_rtx
3947 into the insn's body (or perhaps into the bodies of other
3948 load and store insn that we just made for reloading
3949 and that we moved the structure into). */
3950 subst_reloads ();
3c3eeea6
RK
3951
3952 /* If this was an ASM, make sure that all the reload insns
3953 we have generated are valid. If not, give an error
3954 and delete them. */
3955
3956 if (asm_noperands (PATTERN (insn)) >= 0)
3957 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3958 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3959 && (recog_memoized (p) < 0
3960 || (insn_extract (p),
3961 ! constrain_operands (INSN_CODE (p), 1))))
3962 {
3963 error_for_asm (insn,
3964 "`asm' operand requires impossible reload");
3965 PUT_CODE (p, NOTE);
3966 NOTE_SOURCE_FILE (p) = 0;
3967 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3968 }
32131a9c
RK
3969 }
3970 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3971 is no longer validly lying around to save a future reload.
3972 Note that this does not detect pseudos that were reloaded
3973 for this insn in order to be stored in
3974 (obeying register constraints). That is correct; such reload
3975 registers ARE still valid. */
0639444f 3976 note_stores (oldpat, forget_old_reloads_1);
32131a9c
RK
3977
3978 /* There may have been CLOBBER insns placed after INSN. So scan
3979 between INSN and NEXT and use them to forget old reloads. */
3980 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3981 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3982 note_stores (PATTERN (x), forget_old_reloads_1);
3983
3984#ifdef AUTO_INC_DEC
3985 /* Likewise for regs altered by auto-increment in this insn.
3986 But note that the reg-notes are not changed by reloading:
3987 they still contain the pseudo-regs, not the spill regs. */
3988 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3989 if (REG_NOTE_KIND (x) == REG_INC)
3990 {
3991 /* See if this pseudo reg was reloaded in this insn.
3992 If so, its last-reload info is still valid
3993 because it is based on this insn's reload. */
3994 for (i = 0; i < n_reloads; i++)
3995 if (reload_out[i] == XEXP (x, 0))
3996 break;
3997
08fb99fa 3998 if (i == n_reloads)
9a881562 3999 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
32131a9c
RK
4000 }
4001#endif
4002 }
4003 /* A reload reg's contents are unknown after a label. */
4004 if (GET_CODE (insn) == CODE_LABEL)
4005 for (i = 0; i < n_spills; i++)
4006 {
4007 reg_reloaded_contents[i] = -1;
4008 reg_reloaded_insn[i] = 0;
4009 }
4010
4011 /* Don't assume a reload reg is still good after a call insn
4012 if it is a call-used reg. */
546b63fb 4013 else if (GET_CODE (insn) == CALL_INSN)
32131a9c
RK
4014 for (i = 0; i < n_spills; i++)
4015 if (call_used_regs[spill_regs[i]])
4016 {
4017 reg_reloaded_contents[i] = -1;
4018 reg_reloaded_insn[i] = 0;
4019 }
4020
4021 /* In case registers overlap, allow certain insns to invalidate
4022 particular hard registers. */
4023
4024#ifdef INSN_CLOBBERS_REGNO_P
4025 for (i = 0 ; i < n_spills ; i++)
4026 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
4027 {
4028 reg_reloaded_contents[i] = -1;
4029 reg_reloaded_insn[i] = 0;
4030 }
4031#endif
4032
4033 insn = next;
4034
4035#ifdef USE_C_ALLOCA
4036 alloca (0);
4037#endif
4038 }
4039}
4040
4041/* Discard all record of any value reloaded from X,
4042 or reloaded in X from someplace else;
4043 unless X is an output reload reg of the current insn.
4044
4045 X may be a hard reg (the reload reg)
4046 or it may be a pseudo reg that was reloaded from. */
4047
4048static void
9a881562 4049forget_old_reloads_1 (x, ignored)
32131a9c 4050 rtx x;
9a881562 4051 rtx ignored;
32131a9c
RK
4052{
4053 register int regno;
4054 int nr;
0a2e51a9
RS
4055 int offset = 0;
4056
4057 /* note_stores does give us subregs of hard regs. */
4058 while (GET_CODE (x) == SUBREG)
4059 {
4060 offset += SUBREG_WORD (x);
4061 x = SUBREG_REG (x);
4062 }
32131a9c
RK
4063
4064 if (GET_CODE (x) != REG)
4065 return;
4066
0a2e51a9 4067 regno = REGNO (x) + offset;
32131a9c
RK
4068
4069 if (regno >= FIRST_PSEUDO_REGISTER)
4070 nr = 1;
4071 else
4072 {
4073 int i;
4074 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4075 /* Storing into a spilled-reg invalidates its contents.
4076 This can happen if a block-local pseudo is allocated to that reg
4077 and it wasn't spilled because this block's total need is 0.
4078 Then some insn might have an optional reload and use this reg. */
4079 for (i = 0; i < nr; i++)
4080 if (spill_reg_order[regno + i] >= 0
4081 /* But don't do this if the reg actually serves as an output
4082 reload reg in the current instruction. */
4083 && (n_reloads == 0
4084 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4085 {
4086 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4087 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4088 }
4089 }
4090
4091 /* Since value of X has changed,
4092 forget any value previously copied from it. */
4093
4094 while (nr-- > 0)
4095 /* But don't forget a copy if this is the output reload
4096 that establishes the copy's validity. */
4097 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4098 reg_last_reload_reg[regno + nr] = 0;
4099}
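/* [Editor's note: illustrative sketch, not part of the original source.]
   note_stores may hand this function a SUBREG of a hard register; the
   loop at the top folds each SUBREG_WORD into a word offset, so a store
   through (subreg (reg R) W) invalidates the reload-tracking entries
   starting at hard register R + W.  The offset mapping on its own
   (hypothetical helper):  */
#if 0
static int
stored_hard_regno (int inner_regno, int accumulated_subreg_words)
{
  return inner_regno + accumulated_subreg_words;
}
#endif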
4100\f
4101/* For each reload, the mode of the reload register. */
4102static enum machine_mode reload_mode[MAX_RELOADS];
4103
4104/* For each reload, the largest number of registers it will require. */
4105static int reload_nregs[MAX_RELOADS];
4106
4107/* Comparison function for qsort to decide which of two reloads
4108 should be handled first. *P1 and *P2 are the reload numbers. */
4109
4110static int
4111reload_reg_class_lower (p1, p2)
4112 short *p1, *p2;
4113{
4114 register int r1 = *p1, r2 = *p2;
4115 register int t;
a8fdc208 4116
32131a9c
RK
4117 /* Consider required reloads before optional ones. */
4118 t = reload_optional[r1] - reload_optional[r2];
4119 if (t != 0)
4120 return t;
4121
4122 /* Count all solitary classes before non-solitary ones. */
4123 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4124 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4125 if (t != 0)
4126 return t;
4127
4128 /* Aside from solitaires, consider all multi-reg groups first. */
4129 t = reload_nregs[r2] - reload_nregs[r1];
4130 if (t != 0)
4131 return t;
4132
4133 /* Consider reloads in order of increasing reg-class number. */
4134 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4135 if (t != 0)
4136 return t;
4137
4138 /* If reloads are equally urgent, sort by reload number,
4139 so that the results of qsort leave nothing to chance. */
4140 return r1 - r2;
4141}
4142\f
4143/* The following HARD_REG_SETs indicate when each hard register is
4144 used for a reload of various parts of the current insn. */
4145
4146/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4147static HARD_REG_SET reload_reg_used;
546b63fb
RK
4148/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4149static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4150/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4151static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4152/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4153static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4154/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4155static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c
RK
4156/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4157static HARD_REG_SET reload_reg_used_in_op_addr;
893bc853
RK
4158/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4159static HARD_REG_SET reload_reg_used_in_op_addr_reload;
546b63fb
RK
4160/* If reg is in use for a RELOAD_FOR_INSN reload. */
4161static HARD_REG_SET reload_reg_used_in_insn;
4162/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4163static HARD_REG_SET reload_reg_used_in_other_addr;
32131a9c
RK
4164
4165/* If reg is in use as a reload reg for any sort of reload. */
4166static HARD_REG_SET reload_reg_used_at_all;
4167
be7ae2a4
RK
4168/* If reg is in use as an inherited reload. We just mark the first register
4169 in the group. */
4170static HARD_REG_SET reload_reg_used_for_inherit;
4171
546b63fb
RK
4172/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4173 TYPE. MODE is used to indicate how many consecutive regs are
4174 actually used. */
32131a9c
RK
4175
4176static void
546b63fb 4177mark_reload_reg_in_use (regno, opnum, type, mode)
32131a9c 4178 int regno;
546b63fb
RK
4179 int opnum;
4180 enum reload_type type;
32131a9c
RK
4181 enum machine_mode mode;
4182{
4183 int nregs = HARD_REGNO_NREGS (regno, mode);
4184 int i;
4185
4186 for (i = regno; i < nregs + regno; i++)
4187 {
546b63fb 4188 switch (type)
32131a9c
RK
4189 {
4190 case RELOAD_OTHER:
4191 SET_HARD_REG_BIT (reload_reg_used, i);
4192 break;
4193
546b63fb
RK
4194 case RELOAD_FOR_INPUT_ADDRESS:
4195 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
32131a9c
RK
4196 break;
4197
546b63fb
RK
4198 case RELOAD_FOR_OUTPUT_ADDRESS:
4199 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
32131a9c
RK
4200 break;
4201
4202 case RELOAD_FOR_OPERAND_ADDRESS:
4203 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4204 break;
4205
893bc853
RK
4206 case RELOAD_FOR_OPADDR_ADDR:
4207 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4208 break;
4209
546b63fb
RK
4210 case RELOAD_FOR_OTHER_ADDRESS:
4211 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4212 break;
4213
32131a9c 4214 case RELOAD_FOR_INPUT:
546b63fb 4215 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
32131a9c
RK
4216 break;
4217
4218 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4219 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4220 break;
4221
4222 case RELOAD_FOR_INSN:
4223 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
32131a9c
RK
4224 break;
4225 }
4226
4227 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4228 }
4229}
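/* [Editor's note: illustrative sketch, not part of the original source.]
   A reload register used in a multi-word mode occupies HARD_REGNO_NREGS
   consecutive hard registers, and every one of them gets marked both in
   the per-type set and in reload_reg_used_at_all.  The same pattern over
   a plain bit set (hypothetical 64-register target, no HARD_REG_SET
   macros):  */
#if 0
#include <stdint.h>

static uint64_t used_at_all;

/* Mark hard registers REGNO .. REGNO + NREGS - 1 as busy.  */
static void
mark_regs_in_use (int regno, int nregs)
{
  int i;
  for (i = regno; i < regno + nregs; i++)
    used_at_all |= (uint64_t) 1 << i;
}
#endif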
4230
be7ae2a4
RK
4231/* Similarly, but show REGNO is no longer in use for a reload. */
4232
4233static void
4234clear_reload_reg_in_use (regno, opnum, type, mode)
4235 int regno;
4236 int opnum;
4237 enum reload_type type;
4238 enum machine_mode mode;
4239{
4240 int nregs = HARD_REGNO_NREGS (regno, mode);
4241 int i;
4242
4243 for (i = regno; i < nregs + regno; i++)
4244 {
4245 switch (type)
4246 {
4247 case RELOAD_OTHER:
4248 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4249 break;
4250
4251 case RELOAD_FOR_INPUT_ADDRESS:
4252 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4253 break;
4254
4255 case RELOAD_FOR_OUTPUT_ADDRESS:
4256 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4257 break;
4258
4259 case RELOAD_FOR_OPERAND_ADDRESS:
4260 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4261 break;
4262
893bc853
RK
4263 case RELOAD_FOR_OPADDR_ADDR:
4264 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4265 break;
4266
be7ae2a4
RK
4267 case RELOAD_FOR_OTHER_ADDRESS:
4268 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4269 break;
4270
4271 case RELOAD_FOR_INPUT:
4272 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4273 break;
4274
4275 case RELOAD_FOR_OUTPUT:
4276 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4277 break;
4278
4279 case RELOAD_FOR_INSN:
4280 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4281 break;
4282 }
4283 }
4284}
4285
32131a9c 4286/* 1 if reg REGNO is free as a reload reg for a reload of the sort
546b63fb 4287 specified by OPNUM and TYPE. */
32131a9c
RK
4288
4289static int
546b63fb 4290reload_reg_free_p (regno, opnum, type)
32131a9c 4291 int regno;
546b63fb
RK
4292 int opnum;
4293 enum reload_type type;
32131a9c 4294{
546b63fb
RK
4295 int i;
4296
4297 /* In use for a RELOAD_OTHER means it's not available for anything except
4298 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4299 to be used only for inputs. */
4300
4301 if (type != RELOAD_FOR_OTHER_ADDRESS
4302 && TEST_HARD_REG_BIT (reload_reg_used, regno))
32131a9c 4303 return 0;
546b63fb
RK
4304
4305 switch (type)
32131a9c
RK
4306 {
4307 case RELOAD_OTHER:
224f1d71
RK
4308 /* In use for anything except RELOAD_FOR_OTHER_ADDRESS means
4309 we can't use it for RELOAD_OTHER. */
4310 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4311 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4312 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4313 return 0;
4314
4315 for (i = 0; i < reload_n_operands; i++)
4316 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4317 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4318 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4319 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4320 return 0;
4321
4322 return 1;
32131a9c 4323
32131a9c 4324 case RELOAD_FOR_INPUT:
546b63fb
RK
4325 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4326 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4327 return 0;
4328
893bc853
RK
4329 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4330 return 0;
4331
546b63fb
RK
4332 /* If it is used for some other input, can't use it. */
4333 for (i = 0; i < reload_n_operands; i++)
4334 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4335 return 0;
4336
4337 /* If it is used in a later operand's address, can't use it. */
4338 for (i = opnum + 1; i < reload_n_operands; i++)
4339 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4340 return 0;
4341
4342 return 1;
4343
4344 case RELOAD_FOR_INPUT_ADDRESS:
4345 /* Can't use a register if it is used for an input address for this
4346 operand or used as an input in an earlier one. */
4347 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4348 return 0;
4349
4350 for (i = 0; i < opnum; i++)
4351 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4352 return 0;
4353
4354 return 1;
4355
4356 case RELOAD_FOR_OUTPUT_ADDRESS:
4357 /* Can't use a register if it is used for an output address for this
4358 operand or used as an output in this or a later operand. */
4359 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4360 return 0;
4361
4362 for (i = opnum; i < reload_n_operands; i++)
4363 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4364 return 0;
4365
4366 return 1;
4367
32131a9c 4368 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4369 for (i = 0; i < reload_n_operands; i++)
4370 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4371 return 0;
4372
4373 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4374 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4375
893bc853
RK
4376 case RELOAD_FOR_OPADDR_ADDR:
4377 for (i = 0; i < reload_n_operands; i++)
4378 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4379 return 0;
4380
4381 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4382
32131a9c 4383 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4384 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4385 outputs, or an operand address for this or an earlier output. */
4386 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4387 return 0;
4388
4389 for (i = 0; i < reload_n_operands; i++)
4390 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4391 return 0;
4392
4393 for (i = 0; i <= opnum; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4395 return 0;
4396
4397 return 1;
4398
4399 case RELOAD_FOR_INSN:
4400 for (i = 0; i < reload_n_operands; i++)
4401 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4402 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4403 return 0;
4404
4405 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4406 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4407
4408 case RELOAD_FOR_OTHER_ADDRESS:
4409 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4410 }
4411 abort ();
4412}
4413
4414/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4415 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4416 is not in use for a reload in any prior part of the insn.
4417
4418 We can assume that the reload reg was already tested for availability
4419 at the time it is needed, and we should not check this again,
4420 in case the reg has already been marked in use. */
4421
4422static int
546b63fb 4423reload_reg_free_before_p (regno, opnum, type)
32131a9c 4424 int regno;
546b63fb
RK
4425 int opnum;
4426 enum reload_type type;
32131a9c 4427{
546b63fb
RK
4428 int i;
4429
4430 switch (type)
32131a9c 4431 {
546b63fb
RK
4432 case RELOAD_FOR_OTHER_ADDRESS:
4433 /* These always come first. */
32131a9c
RK
4434 return 1;
4435
546b63fb
RK
4436 case RELOAD_OTHER:
4437 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4438
32131a9c 4439 /* If this use is for part of the insn,
546b63fb
RK
4440 check the reg is not in use for any prior part. It is tempting
4441 to try to do this by falling through from objects that occur
4442 later in the insn to ones that occur earlier, but that will not
4443 correctly take into account the fact that here we MUST ignore
4444 things that would prevent the register from being allocated in
4445 the first place, since we know that it was allocated. */
4446
4447 case RELOAD_FOR_OUTPUT_ADDRESS:
4448 /* Earlier reloads are for earlier outputs or their addresses,
4449 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4450 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4451 RELOAD_OTHER). */
4452 for (i = 0; i < opnum; i++)
4453 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4454 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4455 return 0;
4456
4457 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
32131a9c 4458 return 0;
546b63fb
RK
4459
4460 for (i = 0; i < reload_n_operands; i++)
4461 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4462 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4463 return 0;
4464
4465 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4466 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4467 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4468
32131a9c 4469 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4470 /* This can't be used in the output address for this operand and
4471 anything that can't be used for it, except that we've already
4472 tested for RELOAD_FOR_INSN objects. */
4473
4474 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
32131a9c 4475 return 0;
546b63fb
RK
4476
4477 for (i = 0; i < opnum; i++)
4478 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4479 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4480 return 0;
4481
4482 for (i = 0; i < reload_n_operands; i++)
4483 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4484 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4485 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4486 return 0;
4487
4488 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4489
32131a9c 4490 case RELOAD_FOR_OPERAND_ADDRESS:
893bc853 4491 case RELOAD_FOR_OPADDR_ADDR:
546b63fb
RK
4492 case RELOAD_FOR_INSN:
4493 /* These can't conflict with inputs, or each other, so all we have to
4494 test is input addresses and the addresses of OTHER items. */
4495
4496 for (i = 0; i < reload_n_operands; i++)
4497 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4498 return 0;
4499
4500 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4501
32131a9c 4502 case RELOAD_FOR_INPUT:
546b63fb
RK
4503 /* The only things earlier are the address for this and
4504 earlier inputs, other inputs (which we know we don't conflict
4505 with), and addresses of RELOAD_OTHER objects. */
4506
4507 for (i = 0; i <= opnum; i++)
4508 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4509 return 0;
4510
4511 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4512
4513 case RELOAD_FOR_INPUT_ADDRESS:
4514 /* Similarly, all we have to check is for use in earlier inputs'
4515 addresses. */
4516 for (i = 0; i < opnum; i++)
4517 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4518 return 0;
4519
4520 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
32131a9c
RK
4521 }
4522 abort ();
4523}
4524
4525/* Return 1 if the value in reload reg REGNO, as used by a reload
546b63fb 4526 needed for the part of the insn specified by OPNUM and TYPE,
32131a9c
RK
4527 is still available in REGNO at the end of the insn.
4528
4529 We can assume that the reload reg was already tested for availability
4530 at the time it is needed, and we should not check this again,
4531 in case the reg has already been marked in use. */
4532
4533static int
546b63fb 4534reload_reg_reaches_end_p (regno, opnum, type)
32131a9c 4535 int regno;
546b63fb
RK
4536 int opnum;
4537 enum reload_type type;
32131a9c 4538{
546b63fb
RK
4539 int i;
4540
4541 switch (type)
32131a9c
RK
4542 {
4543 case RELOAD_OTHER:
4544 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4545 its value must reach the end. */
4546 return 1;
4547
4548 /* If this use is for part of the insn,
546b63fb
RK
4549 its value reaches if no subsequent part uses the same register.
4550 Just like the above function, don't try to do this with lots
4551 of fallthroughs. */
4552
4553 case RELOAD_FOR_OTHER_ADDRESS:
4554 /* Here we check for everything else, since these don't conflict
4555 with anything else and everything comes later. */
4556
4557 for (i = 0; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4559 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4560 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4562 return 0;
4563
4564 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4565 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4566 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4567
4568 case RELOAD_FOR_INPUT_ADDRESS:
4569 /* Similar, except that we check only for this and subsequent inputs
4570 and the address of only subsequent inputs and we do not need
4571 to check for RELOAD_OTHER objects since they are known not to
4572 conflict. */
4573
4574 for (i = opnum; i < reload_n_operands; i++)
4575 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4576 return 0;
4577
4578 for (i = opnum + 1; i < reload_n_operands; i++)
4579 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4580 return 0;
4581
4582 for (i = 0; i < reload_n_operands; i++)
4583 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4584 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4585 return 0;
4586
893bc853
RK
4587 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4588 return 0;
4589
546b63fb
RK
4590 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4591 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4592
32131a9c 4593 case RELOAD_FOR_INPUT:
546b63fb
RK
4594 /* Similar to input address, except we start at the next operand for
4595 both input and input address and we do not check for
4596 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4597 would conflict. */
4598
4599 for (i = opnum + 1; i < reload_n_operands; i++)
4600 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4601 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4602 return 0;
4603
0f41302f 4604 /* ... fall through ... */
546b63fb 4605
32131a9c 4606 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
4607 /* Check outputs and their addresses. */
4608
4609 for (i = 0; i < reload_n_operands; i++)
4610 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4611 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4612 return 0;
4613
4614 return 1;
4615
893bc853
RK
4616 case RELOAD_FOR_OPADDR_ADDR:
4617 for (i = 0; i < reload_n_operands; i++)
4618 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4619 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4620 return 0;
4621
4622 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4623 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4624
546b63fb 4625 case RELOAD_FOR_INSN:
893bc853 4626 /* These conflict with other outputs and with RELOAD_OTHER. So
546b63fb
RK
4627 we need only check for output addresses. */
4628
4629 opnum = -1;
4630
0f41302f 4631 /* ... fall through ... */
546b63fb 4632
32131a9c 4633 case RELOAD_FOR_OUTPUT:
546b63fb
RK
4634 case RELOAD_FOR_OUTPUT_ADDRESS:
4635 /* We already know these can't conflict with a later output. So the
4636 only things to check are later output addresses. */
4637 for (i = opnum + 1; i < reload_n_operands; i++)
4638 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4639 return 0;
4640
32131a9c
RK
4641 return 1;
4642 }
546b63fb 4643
32131a9c
RK
4644 abort ();
4645}
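/* A concrete reading of the checks above: a register used for the
   RELOAD_FOR_INPUT reload of, say, operand 1 reaches the end only if no
   input or input-address reload of a later operand and no output or
   output-address reload uses it; operand-address and insn reloads need not
   be checked, since they always conflict with an input reload and so could
   never have been given the same register.  */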
4646\f
351aa1c1
RK
4647/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4648 Return 0 otherwise.
4649
4650 This function uses the same algorithm as reload_reg_free_p above. */
4651
4652static int
4653reloads_conflict (r1, r2)
4654 int r1, r2;
4655{
4656 enum reload_type r1_type = reload_when_needed[r1];
4657 enum reload_type r2_type = reload_when_needed[r2];
4658 int r1_opnum = reload_opnum[r1];
4659 int r2_opnum = reload_opnum[r2];
4660
0f41302f
MS
4661 /* RELOAD_OTHER conflicts with everything except
4662 RELOAD_FOR_OTHER_ADDRESS. */
351aa1c1 4663
adab4fc5 4664 if (r2_type == RELOAD_OTHER && r1_type != RELOAD_FOR_OTHER_ADDRESS)
351aa1c1
RK
4665 return 1;
4666
4667 /* Otherwise, check conflicts differently for each type. */
4668
4669 switch (r1_type)
4670 {
4671 case RELOAD_FOR_INPUT:
4672 return (r2_type == RELOAD_FOR_INSN
4673 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
893bc853 4674 || r2_type == RELOAD_FOR_OPADDR_ADDR
351aa1c1
RK
4675 || r2_type == RELOAD_FOR_INPUT
4676 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4677
4678 case RELOAD_FOR_INPUT_ADDRESS:
4679 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4680 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4681
4682 case RELOAD_FOR_OUTPUT_ADDRESS:
4683 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4684 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4685
4686 case RELOAD_FOR_OPERAND_ADDRESS:
4687 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4688 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4689
893bc853
RK
4690 case RELOAD_FOR_OPADDR_ADDR:
4691 return (r2_type == RELOAD_FOR_INPUT
4692 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4693
351aa1c1
RK
4694 case RELOAD_FOR_OUTPUT:
4695 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
0aaf4a58 4696 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
351aa1c1
RK
4697 && r2_opnum >= r1_opnum));
4698
4699 case RELOAD_FOR_INSN:
4700 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4701 || r2_type == RELOAD_FOR_INSN
4702 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4703
4704 case RELOAD_FOR_OTHER_ADDRESS:
4705 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4706
adab4fc5
RK
4707 case RELOAD_OTHER:
4708 return r2_type != RELOAD_FOR_OTHER_ADDRESS;
4709
351aa1c1
RK
4710 default:
4711 abort ();
4712 }
4713}
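/* An example of the rules above: a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 1 conflicts with a RELOAD_FOR_INPUT reload for operand 0, since
   the operand-0 value must survive until the insn while operand 1's address
   is loaded after it; it does not conflict with a RELOAD_FOR_INPUT reload
   for operand 2, since the address register is no longer needed once
   operand 1 itself has been loaded.  */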
4714\f
32131a9c
RK
4715/* Vector of reload-numbers showing the order in which the reloads should
4716 be processed. */
4717short reload_order[MAX_RELOADS];
4718
4719/* Indexed by reload number, 1 if incoming value
4720 inherited from previous insns. */
4721char reload_inherited[MAX_RELOADS];
4722
4723/* For an inherited reload, this is the insn the reload was inherited from,
4724 if we know it. Otherwise, this is 0. */
4725rtx reload_inheritance_insn[MAX_RELOADS];
4726
4727/* If non-zero, this is a place to get the value of the reload,
4728 rather than using reload_in. */
4729rtx reload_override_in[MAX_RELOADS];
4730
4731/* For each reload, the index in spill_regs of the spill register used,
4732 or -1 if we did not need one of the spill registers for this reload. */
4733int reload_spill_index[MAX_RELOADS];
4734
32131a9c
RK
4735/* Find a spill register to use as a reload register for reload R.
4736 LAST_RELOAD is non-zero if this is the last reload for the insn being
4737 processed.
4738
4739 Set reload_reg_rtx[R] to the register allocated.
4740
4741 If NOERROR is nonzero, we return 1 if successful,
4742 or 0 if we couldn't find a spill reg and we didn't change anything. */
4743
4744static int
4745allocate_reload_reg (r, insn, last_reload, noerror)
4746 int r;
4747 rtx insn;
4748 int last_reload;
4749 int noerror;
4750{
4751 int i;
4752 int pass;
4753 int count;
4754 rtx new;
4755 int regno;
4756
4757 /* If we put this reload ahead, thinking it is a group,
4758 then insist on finding a group. Otherwise we can grab a
a8fdc208 4759 reg that some other reload needs.
32131a9c
RK
4760 (That can happen when we have a 68000 DATA_OR_FP_REG
4761 which is a group of data regs or one fp reg.)
4762 We need not be so restrictive if there are no more reloads
4763 for this insn.
4764
4765 ??? Really it would be nicer to have smarter handling
4766 for that kind of reg class, where a problem like this is normal.
4767 Perhaps those classes should be avoided for reloading
4768 by use of more alternatives. */
4769
4770 int force_group = reload_nregs[r] > 1 && ! last_reload;
4771
4772 /* If we want a single register and haven't yet found one,
4773 take any reg in the right class and not in use.
4774 If we want a consecutive group, here is where we look for it.
4775
4776 We use two passes so we can first look for reload regs to
4777 reuse, which are already in use for other reloads in this insn,
4778 and only then use additional registers.
4779 I think that maximizing reuse is needed to make sure we don't
4780 run out of reload regs. Suppose we have three reloads, and
4781 reloads A and B can share regs. These need two regs.
4782 Suppose A and B are given different regs.
4783 That leaves none for C. */
4784 for (pass = 0; pass < 2; pass++)
4785 {
4786 /* I is the index in spill_regs.
4787 We advance it round-robin between insns to use all spill regs
4788 equally, so that inherited reloads have a chance
a5339699
RK
4789 of leapfrogging each other. Don't do this, however, when we have
4790 group needs and failure would be fatal; if we only have a relatively
4791 small number of spill registers, and more than one of them has
4792 group needs, then by starting in the middle, we may end up
4793 allocating the first one in such a way that we are not left with
4794 sufficient groups to handle the rest. */
4795
4796 if (noerror || ! force_group)
4797 i = last_spill_reg;
4798 else
4799 i = -1;
4800
4801 for (count = 0; count < n_spills; count++)
32131a9c
RK
4802 {
4803 int class = (int) reload_reg_class[r];
4804
4805 i = (i + 1) % n_spills;
4806
546b63fb
RK
4807 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4808 reload_when_needed[r])
32131a9c
RK
4809 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4810 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
be7ae2a4
RK
4811 /* Look first for regs to share, then for unshared. But
4812 don't share regs used for inherited reloads; they are
4813 the ones we want to preserve. */
4814 && (pass
4815 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4816 spill_regs[i])
4817 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4818 spill_regs[i]))))
32131a9c
RK
4819 {
4820 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4821 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4822 (on 68000) got us two FP regs. If NR is 1,
4823 we would reject both of them. */
4824 if (force_group)
4825 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4826 /* If we need only one reg, we have already won. */
4827 if (nr == 1)
4828 {
4829 /* But reject a single reg if we demand a group. */
4830 if (force_group)
4831 continue;
4832 break;
4833 }
4834 /* Otherwise check that as many consecutive regs as we need
4835 are available here.
4836 Also, don't use for a group registers that are
4837 needed for nongroups. */
4838 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4839 while (nr > 1)
4840 {
4841 regno = spill_regs[i] + nr - 1;
4842 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4843 && spill_reg_order[regno] >= 0
546b63fb
RK
4844 && reload_reg_free_p (regno, reload_opnum[r],
4845 reload_when_needed[r])
32131a9c
RK
4846 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4847 regno)))
4848 break;
4849 nr--;
4850 }
4851 if (nr == 1)
4852 break;
4853 }
4854 }
4855
4856 /* If we found something on pass 1, omit pass 2. */
4857 if (count < n_spills)
4858 break;
4859 }
4860
4861 /* We should have found a spill register by now. */
4862 if (count == n_spills)
4863 {
4864 if (noerror)
4865 return 0;
139fc12e 4866 goto failure;
32131a9c
RK
4867 }
4868
be7ae2a4
RK
4869 /* I is the index in SPILL_REG_RTX of the reload register we are to
4870 allocate. Get an rtx for it and find its register number. */
32131a9c
RK
4871
4872 new = spill_reg_rtx[i];
4873
4874 if (new == 0 || GET_MODE (new) != reload_mode[r])
be7ae2a4
RK
4875 spill_reg_rtx[i] = new
4876 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4877
32131a9c
RK
4878 regno = true_regnum (new);
4879
4880 /* Detect when the reload reg can't hold the reload mode.
4881 This used to be one `if', but Sequent compiler can't handle that. */
4882 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4883 {
4884 enum machine_mode test_mode = VOIDmode;
4885 if (reload_in[r])
4886 test_mode = GET_MODE (reload_in[r]);
4887 /* If reload_in[r] has VOIDmode, it means we will load it
4888 in whatever mode the reload reg has: to wit, reload_mode[r].
4889 We have already tested that for validity. */
4890 /* Aside from that, we need to test that the expressions
4891 to reload from or into have modes which are valid for this
4892 reload register. Otherwise the reload insns would be invalid. */
4893 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4894 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4895 if (! (reload_out[r] != 0
4896 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
be7ae2a4
RK
4897 {
4898 /* The reg is OK. */
4899 last_spill_reg = i;
4900
4901 /* Mark as in use for this insn the reload regs we use
4902 for this. */
4903 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4904 reload_when_needed[r], reload_mode[r]);
4905
4906 reload_reg_rtx[r] = new;
4907 reload_spill_index[r] = i;
4908 return 1;
4909 }
32131a9c
RK
4910 }
4911
4912 /* The reg is not OK. */
4913 if (noerror)
4914 return 0;
4915
139fc12e 4916 failure:
32131a9c
RK
4917 if (asm_noperands (PATTERN (insn)) < 0)
4918 /* It's the compiler's fault. */
a89b2cc4 4919 fatal_insn ("Could not find a spill register", insn);
32131a9c
RK
4920
4921 /* It's the user's fault; the operand's mode and constraint
4922 don't match. Disable this reload so we don't crash in final. */
4923 error_for_asm (insn,
4924 "`asm' operand constraint incompatible with operand size");
4925 reload_in[r] = 0;
4926 reload_out[r] = 0;
4927 reload_reg_rtx[r] = 0;
4928 reload_optional[r] = 1;
4929 reload_secondary_p[r] = 1;
4930
4931 return 1;
4932}
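/* choose_reload_regs below calls this routine, in reload_order, for each
   non-optional reload that has not yet been given a register, with NOERROR
   nonzero while it is still trying to use inheritance; if any reload cannot
   be satisfied on that attempt, the whole assignment is undone and retried
   with inheritance turned off.  */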
4933\f
4934/* Assign hard reg targets for the pseudo-registers we must reload
4935 into hard regs for this insn.
4936 Also output the instructions to copy them in and out of the hard regs.
4937
4938 For machines with register classes, we are responsible for
4939 finding a reload reg in the proper class. */
4940
4941static void
4942choose_reload_regs (insn, avoid_return_reg)
4943 rtx insn;
32131a9c
RK
4944 rtx avoid_return_reg;
4945{
4946 register int i, j;
4947 int max_group_size = 1;
4948 enum reg_class group_class = NO_REGS;
4949 int inheritance;
4950
4951 rtx save_reload_reg_rtx[MAX_RELOADS];
4952 char save_reload_inherited[MAX_RELOADS];
4953 rtx save_reload_inheritance_insn[MAX_RELOADS];
4954 rtx save_reload_override_in[MAX_RELOADS];
4955 int save_reload_spill_index[MAX_RELOADS];
4956 HARD_REG_SET save_reload_reg_used;
546b63fb
RK
4957 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4958 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4959 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4960 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
32131a9c 4961 HARD_REG_SET save_reload_reg_used_in_op_addr;
893bc853 4962 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
546b63fb
RK
4963 HARD_REG_SET save_reload_reg_used_in_insn;
4964 HARD_REG_SET save_reload_reg_used_in_other_addr;
32131a9c
RK
4965 HARD_REG_SET save_reload_reg_used_at_all;
4966
4967 bzero (reload_inherited, MAX_RELOADS);
4c9a05bc
RK
4968 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4969 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
32131a9c
RK
4970
4971 CLEAR_HARD_REG_SET (reload_reg_used);
4972 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
32131a9c 4973 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
893bc853 4974 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
546b63fb
RK
4975 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4976 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
32131a9c 4977
546b63fb
RK
4978 for (i = 0; i < reload_n_operands; i++)
4979 {
4980 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4981 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4982 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4983 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4984 }
32131a9c
RK
4985
4986#ifdef SMALL_REGISTER_CLASSES
4987 /* Don't bother with avoiding the return reg
4988 if we have no mandatory reload that could use it. */
4989 if (avoid_return_reg)
4990 {
4991 int do_avoid = 0;
4992 int regno = REGNO (avoid_return_reg);
4993 int nregs
4994 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4995 int r;
4996
4997 for (r = regno; r < regno + nregs; r++)
4998 if (spill_reg_order[r] >= 0)
4999 for (j = 0; j < n_reloads; j++)
5000 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5001 && (reload_in[j] != 0 || reload_out[j] != 0
5002 || reload_secondary_p[j])
5003 &&
5004 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5005 do_avoid = 1;
5006 if (!do_avoid)
5007 avoid_return_reg = 0;
5008 }
5009#endif /* SMALL_REGISTER_CLASSES */
5010
5011#if 0 /* Not needed, now that we can always retry without inheritance. */
5012 /* See if we have more mandatory reloads than spill regs.
5013 If so, then we cannot risk optimizations that could prevent
a8fdc208 5014 reloads from sharing one spill register.
32131a9c
RK
5015
5016 Since we will try finding a better register than reload_reg_rtx
5017 unless it is equal to reload_in or reload_out, count such reloads. */
5018
5019 {
5020 int tem = 0;
5021#ifdef SMALL_REGISTER_CLASSES
5022 int tem = (avoid_return_reg != 0);
a8fdc208 5023#endif
32131a9c
RK
5024 for (j = 0; j < n_reloads; j++)
5025 if (! reload_optional[j]
5026 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5027 && (reload_reg_rtx[j] == 0
5028 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5029 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5030 tem++;
5031 if (tem > n_spills)
5032 must_reuse = 1;
5033 }
5034#endif
5035
5036#ifdef SMALL_REGISTER_CLASSES
5037 /* Don't use the subroutine call return reg for a reload
5038 if we are supposed to avoid it. */
5039 if (avoid_return_reg)
5040 {
5041 int regno = REGNO (avoid_return_reg);
5042 int nregs
5043 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5044 int r;
5045
5046 for (r = regno; r < regno + nregs; r++)
5047 if (spill_reg_order[r] >= 0)
5048 SET_HARD_REG_BIT (reload_reg_used, r);
5049 }
5050#endif /* SMALL_REGISTER_CLASSES */
5051
5052 /* In order to be certain of getting the registers we need,
5053 we must sort the reloads into order of increasing register class.
5054 Then our grabbing of reload registers will parallel the process
a8fdc208 5055 that provided the reload registers.
32131a9c
RK
5056
5057 Also note whether any of the reloads wants a consecutive group of regs.
5058 If so, record the maximum size of the group desired and what
5059 register class contains all the groups needed by this insn. */
5060
5061 for (j = 0; j < n_reloads; j++)
5062 {
5063 reload_order[j] = j;
5064 reload_spill_index[j] = -1;
5065
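      /* Use the wider of the input and output modes (a VOIDmode input
	 counts as narrowest) so the reload register can hold the value
	 in whichever direction it is needed.  */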
5066 reload_mode[j]
546b63fb
RK
5067 = (reload_inmode[j] == VOIDmode
5068 || (GET_MODE_SIZE (reload_outmode[j])
5069 > GET_MODE_SIZE (reload_inmode[j])))
5070 ? reload_outmode[j] : reload_inmode[j];
32131a9c
RK
5071
5072 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5073
5074 if (reload_nregs[j] > 1)
5075 {
5076 max_group_size = MAX (reload_nregs[j], max_group_size);
5077 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5078 }
5079
5080 /* If we have already decided to use a certain register,
5081 don't use it in another way. */
5082 if (reload_reg_rtx[j])
546b63fb 5083 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
32131a9c
RK
5084 reload_when_needed[j], reload_mode[j]);
5085 }
5086
5087 if (n_reloads > 1)
5088 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5089
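  /* Save the current assignment state so that, if the attempt to allocate
     with inheritance fails, everything can be restored before retrying
     without inheritance (see the `fail:' label below).  */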
4c9a05bc
RK
5090 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5091 sizeof reload_reg_rtx);
32131a9c 5092 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4c9a05bc
RK
5093 bcopy ((char *) reload_inheritance_insn,
5094 (char *) save_reload_inheritance_insn,
32131a9c 5095 sizeof reload_inheritance_insn);
4c9a05bc 5096 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
32131a9c 5097 sizeof reload_override_in);
4c9a05bc 5098 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
32131a9c
RK
5099 sizeof reload_spill_index);
5100 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5101 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
32131a9c
RK
5102 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5103 reload_reg_used_in_op_addr);
893bc853
RK
5104
5105 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5106 reload_reg_used_in_op_addr_reload);
5107
546b63fb
RK
5108 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5109 reload_reg_used_in_insn);
5110 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5111 reload_reg_used_in_other_addr);
5112
5113 for (i = 0; i < reload_n_operands; i++)
5114 {
5115 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5116 reload_reg_used_in_output[i]);
5117 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5118 reload_reg_used_in_input[i]);
5119 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5120 reload_reg_used_in_input_addr[i]);
5121 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5122 reload_reg_used_in_output_addr[i]);
5123 }
32131a9c 5124
58b1581b
RS
5125 /* If -O, try first with inheritance, then turning it off.
5126 If not -O, don't do inheritance.
5127 Using inheritance when not optimizing leads to paradoxes
5128 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5129 because one side of the comparison might be inherited. */
32131a9c 5130
58b1581b 5131 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
32131a9c
RK
5132 {
5133 /* Process the reloads in order of preference just found.
5134 Beyond this point, subregs can be found in reload_reg_rtx.
5135
5136 This used to look for an existing reloaded home for all
5137 of the reloads, and only then perform any new reloads.
5138 But that could lose if the reloads were done out of reg-class order
5139 because a later reload with a looser constraint might have an old
5140 home in a register needed by an earlier reload with a tighter constraint.
5141
5142 To solve this, we make two passes over the reloads, in the order
5143 described above. In the first pass we try to inherit a reload
5144 from a previous insn. If there is a later reload that needs a
5145 class that is a proper subset of the class being processed, we must
5146 also allocate a spill register during the first pass.
5147
5148 Then make a second pass over the reloads to allocate any reloads
5149 that haven't been given registers yet. */
5150
be7ae2a4
RK
5151 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5152
32131a9c
RK
5153 for (j = 0; j < n_reloads; j++)
5154 {
5155 register int r = reload_order[j];
5156
5157 /* Ignore reloads that got marked inoperative. */
5158 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5159 continue;
5160
5161 /* If find_reloads chose to use reload_in or reload_out as a reload
5162 register, we don't need to choose one. Otherwise, try even if it found
5163 one since we might save an insn if we find the value lying around. */
5164 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5165 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5166 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5167 continue;
5168
5169#if 0 /* No longer needed for correct operation.
5170 It might give better code, or might not; worth an experiment? */
5171 /* If this is an optional reload, we can't inherit from earlier insns
5172 until we are sure that any non-optional reloads have been allocated.
5173 The following code takes advantage of the fact that optional reloads
5174 are at the end of reload_order. */
5175 if (reload_optional[r] != 0)
5176 for (i = 0; i < j; i++)
5177 if ((reload_out[reload_order[i]] != 0
5178 || reload_in[reload_order[i]] != 0
5179 || reload_secondary_p[reload_order[i]])
5180 && ! reload_optional[reload_order[i]]
5181 && reload_reg_rtx[reload_order[i]] == 0)
5182 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5183#endif
5184
5185 /* First see if this pseudo is already available as reloaded
5186 for a previous insn. We cannot try to inherit for reloads
5187 that are smaller than the maximum number of registers needed
5188 for groups unless the register we would allocate cannot be used
5189 for the groups.
5190
5191 We could check here to see if this is a secondary reload for
5192 an object that is already in a register of the desired class.
5193 This would avoid the need for the secondary reload register.
5194 But this is complex because we can't easily determine what
5195 objects might want to be loaded via this reload. So let a register
5196 be allocated here. In `emit_reload_insns' we suppress one of the
5197 loads in the case described above. */
5198
5199 if (inheritance)
5200 {
5201 register int regno = -1;
db660765 5202 enum machine_mode mode;
32131a9c
RK
5203
5204 if (reload_in[r] == 0)
5205 ;
5206 else if (GET_CODE (reload_in[r]) == REG)
db660765
TW
5207 {
5208 regno = REGNO (reload_in[r]);
5209 mode = GET_MODE (reload_in[r]);
5210 }
32131a9c 5211 else if (GET_CODE (reload_in_reg[r]) == REG)
db660765
TW
5212 {
5213 regno = REGNO (reload_in_reg[r]);
5214 mode = GET_MODE (reload_in_reg[r]);
5215 }
32131a9c
RK
5216#if 0
5217 /* This won't work, since REGNO can be a pseudo reg number.
5218 Also, it takes much more hair to keep track of all the things
5219 that can invalidate an inherited reload of part of a pseudoreg. */
5220 else if (GET_CODE (reload_in[r]) == SUBREG
5221 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5222 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5223#endif
5224
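	  /* If the last reload of this pseudo left its value in a spill
	     register that is still valid, wide enough, in the right class,
	     compatible with any group needs of this insn, and free for this
	     reload, we can inherit that register instead of loading the
	     value again.  */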
5225 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5226 {
5227 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5228
5229 if (reg_reloaded_contents[i] == regno
db660765
TW
5230 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5231 >= GET_MODE_SIZE (mode))
32131a9c
RK
5232 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5233 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5234 spill_regs[i])
5235 && (reload_nregs[r] == max_group_size
5236 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5237 spill_regs[i]))
546b63fb
RK
5238 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5239 reload_when_needed[r])
32131a9c 5240 && reload_reg_free_before_p (spill_regs[i],
546b63fb 5241 reload_opnum[r],
32131a9c
RK
5242 reload_when_needed[r]))
5243 {
5244 /* If a group is needed, verify that all the subsequent
0f41302f 5245 registers still have their values intact. */
32131a9c
RK
5246 int nr
5247 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5248 int k;
5249
5250 for (k = 1; k < nr; k++)
5251 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5252 != regno)
5253 break;
5254
5255 if (k == nr)
5256 {
c74fa651
RS
5257 int i1;
5258
5259 /* We found a register that contains the
5260 value we need. If this register is the
5261 same as an `earlyclobber' operand of the
5262 current insn, just mark it as a place to
5263 reload from since we can't use it as the
5264 reload register itself. */
5265
5266 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5267 if (reg_overlap_mentioned_for_reload_p
5268 (reg_last_reload_reg[regno],
5269 reload_earlyclobbers[i1]))
5270 break;
5271
8908158d
RS
5272 if (i1 != n_earlyclobbers
5273 /* Don't really use the inherited spill reg
5274 if we need it wider than we've got it. */
5275 || (GET_MODE_SIZE (reload_mode[r])
5276 > GET_MODE_SIZE (mode)))
c74fa651
RS
5277 reload_override_in[r] = reg_last_reload_reg[regno];
5278 else
5279 {
54c40e68 5280 int k;
c74fa651
RS
5281 /* We can use this as a reload reg. */
5282 /* Mark the register as in use for this part of
5283 the insn. */
5284 mark_reload_reg_in_use (spill_regs[i],
5285 reload_opnum[r],
5286 reload_when_needed[r],
5287 reload_mode[r]);
5288 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5289 reload_inherited[r] = 1;
5290 reload_inheritance_insn[r]
5291 = reg_reloaded_insn[i];
5292 reload_spill_index[r] = i;
54c40e68
RS
5293 for (k = 0; k < nr; k++)
5294 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5295 spill_regs[i + k]);
c74fa651 5296 }
32131a9c
RK
5297 }
5298 }
5299 }
5300 }
5301
5302 /* Here's another way to see if the value is already lying around. */
5303 if (inheritance
5304 && reload_in[r] != 0
5305 && ! reload_inherited[r]
5306 && reload_out[r] == 0
5307 && (CONSTANT_P (reload_in[r])
5308 || GET_CODE (reload_in[r]) == PLUS
5309 || GET_CODE (reload_in[r]) == REG
5310 || GET_CODE (reload_in[r]) == MEM)
5311 && (reload_nregs[r] == max_group_size
5312 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5313 {
5314 register rtx equiv
5315 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
fb3821f7 5316 -1, NULL_PTR, 0, reload_mode[r]);
32131a9c
RK
5317 int regno;
5318
5319 if (equiv != 0)
5320 {
5321 if (GET_CODE (equiv) == REG)
5322 regno = REGNO (equiv);
5323 else if (GET_CODE (equiv) == SUBREG)
5324 {
f8a9e02b
RK
5325 /* This must be a SUBREG of a hard register.
5326 Make a new REG since this might be used in an
5327 address and not all machines support SUBREGs
5328 there. */
5329 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5330 equiv = gen_rtx (REG, reload_mode[r], regno);
32131a9c
RK
5331 }
5332 else
5333 abort ();
5334 }
5335
5336 /* If we found a spill reg, reject it unless it is free
5337 and of the desired class. */
5338 if (equiv != 0
5339 && ((spill_reg_order[regno] >= 0
546b63fb 5340 && ! reload_reg_free_before_p (regno, reload_opnum[r],
32131a9c
RK
5341 reload_when_needed[r]))
5342 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5343 regno)))
5344 equiv = 0;
5345
5346 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5347 equiv = 0;
5348
5349 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5350 equiv = 0;
5351
5352 /* We found a register that contains the value we need.
5353 If this register is the same as an `earlyclobber' operand
5354 of the current insn, just mark it as a place to reload from
5355 since we can't use it as the reload register itself. */
5356
5357 if (equiv != 0)
5358 for (i = 0; i < n_earlyclobbers; i++)
bfa30b22
RK
5359 if (reg_overlap_mentioned_for_reload_p (equiv,
5360 reload_earlyclobbers[i]))
32131a9c
RK
5361 {
5362 reload_override_in[r] = equiv;
5363 equiv = 0;
5364 break;
5365 }
5366
0f41302f
MS
5367 /* JRV: If the equiv register we have found is
5368 explicitly clobbered in the current insn, mark but
5369 don't use, as above. */
32131a9c
RK
5370
5371 if (equiv != 0 && regno_clobbered_p (regno, insn))
5372 {
5373 reload_override_in[r] = equiv;
5374 equiv = 0;
5375 }
5376
5377 /* If we found an equivalent reg, say no code need be generated
5378 to load it, and use it as our reload reg. */
3ec2ea3e 5379 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
32131a9c 5380 {
100338df
JL
5381 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5382 int k;
32131a9c
RK
5383 reload_reg_rtx[r] = equiv;
5384 reload_inherited[r] = 1;
100338df
JL
5385
5386 /* If any of the hard registers in EQUIV are spill
5387 registers, mark them as in use for this insn. */
5388 for (k = 0; k < nr; k++)
be7ae2a4 5389 {
100338df
JL
5390 i = spill_reg_order[regno + k];
5391 if (i >= 0)
5392 {
5393 mark_reload_reg_in_use (regno, reload_opnum[r],
5394 reload_when_needed[r],
5395 reload_mode[r]);
5396 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5397 regno + k);
5398 }
be7ae2a4 5399 }
32131a9c
RK
5400 }
5401 }
5402
5403 /* If we found a register to use already, or if this is an optional
5404 reload, we are done. */
5405 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5406 continue;
5407
5408#if 0 /* No longer needed for correct operation. Might or might not
5409 give better code on the average. Want to experiment? */
5410
5411 /* See if there is a later reload that has a class different from our
5412 class that intersects our class or that requires less register
5413 than our reload. If so, we must allocate a register to this
5414 reload now, since that reload might inherit a previous reload
5415 and take the only available register in our class. Don't do this
5416 for optional reloads since they will force all previous reloads
5417 to be allocated. Also don't do this for reloads that have been
5418 turned off. */
5419
5420 for (i = j + 1; i < n_reloads; i++)
5421 {
5422 int s = reload_order[i];
5423
d45cf215
RS
5424 if ((reload_in[s] == 0 && reload_out[s] == 0
5425 && ! reload_secondary_p[s])
32131a9c
RK
5426 || reload_optional[s])
5427 continue;
5428
5429 if ((reload_reg_class[s] != reload_reg_class[r]
5430 && reg_classes_intersect_p (reload_reg_class[r],
5431 reload_reg_class[s]))
5432 || reload_nregs[s] < reload_nregs[r])
5433 break;
5434 }
5435
5436 if (i == n_reloads)
5437 continue;
5438
5439 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5440#endif
5441 }
5442
5443 /* Now allocate reload registers for anything non-optional that
5444 didn't get one yet. */
5445 for (j = 0; j < n_reloads; j++)
5446 {
5447 register int r = reload_order[j];
5448
5449 /* Ignore reloads that got marked inoperative. */
5450 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5451 continue;
5452
5453 /* Skip reloads that already have a register allocated or are
0f41302f 5454 optional. */
32131a9c
RK
5455 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5456 continue;
5457
5458 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5459 break;
5460 }
5461
5462 /* If that loop got all the way, we have won. */
5463 if (j == n_reloads)
5464 break;
5465
5466 fail:
5467 /* Loop around and try without any inheritance. */
5468 /* First undo everything done by the failed attempt
5469 to allocate with inheritance. */
4c9a05bc
RK
5470 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5471 sizeof reload_reg_rtx);
5472 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5473 sizeof reload_inherited);
5474 bcopy ((char *) save_reload_inheritance_insn,
5475 (char *) reload_inheritance_insn,
32131a9c 5476 sizeof reload_inheritance_insn);
4c9a05bc 5477 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
32131a9c 5478 sizeof reload_override_in);
4c9a05bc 5479 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
32131a9c
RK
5480 sizeof reload_spill_index);
5481 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5482 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
32131a9c
RK
5483 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5484 save_reload_reg_used_in_op_addr);
893bc853
RK
5485 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5486 save_reload_reg_used_in_op_addr_reload);
546b63fb
RK
5487 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5488 save_reload_reg_used_in_insn);
5489 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5490 save_reload_reg_used_in_other_addr);
5491
5492 for (i = 0; i < reload_n_operands; i++)
5493 {
5494 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5495 save_reload_reg_used_in_input[i]);
5496 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5497 save_reload_reg_used_in_output[i]);
5498 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5499 save_reload_reg_used_in_input_addr[i]);
5500 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5501 save_reload_reg_used_in_output_addr[i]);
5502 }
32131a9c
RK
5503 }
5504
5505 /* If we thought we could inherit a reload, because it seemed that
5506 nothing else wanted the same reload register earlier in the insn,
5507 verify that assumption, now that all reloads have been assigned. */
5508
5509 for (j = 0; j < n_reloads; j++)
5510 {
5511 register int r = reload_order[j];
5512
5513 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5514 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
546b63fb 5515 reload_opnum[r],
32131a9c
RK
5516 reload_when_needed[r]))
5517 reload_inherited[r] = 0;
5518
5519 /* If we found a better place to reload from,
5520 validate it in the same fashion, if it is a reload reg. */
5521 if (reload_override_in[r]
5522 && (GET_CODE (reload_override_in[r]) == REG
5523 || GET_CODE (reload_override_in[r]) == SUBREG))
5524 {
5525 int regno = true_regnum (reload_override_in[r]);
5526 if (spill_reg_order[regno] >= 0
546b63fb
RK
5527 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5528 reload_when_needed[r]))
32131a9c
RK
5529 reload_override_in[r] = 0;
5530 }
5531 }
5532
5533 /* Now that reload_override_in is known valid,
5534 actually override reload_in. */
5535 for (j = 0; j < n_reloads; j++)
5536 if (reload_override_in[j])
5537 reload_in[j] = reload_override_in[j];
5538
5539 /* If this reload won't be done because it has been cancelled or is
5540 optional and not inherited, clear reload_reg_rtx so other
5541 routines (such as subst_reloads) don't get confused. */
5542 for (j = 0; j < n_reloads; j++)
be7ae2a4
RK
5543 if (reload_reg_rtx[j] != 0
5544 && ((reload_optional[j] && ! reload_inherited[j])
5545 || (reload_in[j] == 0 && reload_out[j] == 0
5546 && ! reload_secondary_p[j])))
5547 {
5548 int regno = true_regnum (reload_reg_rtx[j]);
5549
5550 if (spill_reg_order[regno] >= 0)
5551 clear_reload_reg_in_use (regno, reload_opnum[j],
5552 reload_when_needed[j], reload_mode[j]);
5553 reload_reg_rtx[j] = 0;
5554 }
32131a9c
RK
5555
5556 /* Record which pseudos and which spill regs have output reloads. */
5557 for (j = 0; j < n_reloads; j++)
5558 {
5559 register int r = reload_order[j];
5560
5561 i = reload_spill_index[r];
5562
5563 /* I is nonneg if this reload used one of the spill regs.
5564 If reload_reg_rtx[r] is 0, this is an optional reload
5565 that we opted to ignore. */
5566 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5567 && reload_reg_rtx[r] != 0)
5568 {
5569 register int nregno = REGNO (reload_out[r]);
372e033b
RS
5570 int nr = 1;
5571
5572 if (nregno < FIRST_PSEUDO_REGISTER)
5573 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
32131a9c
RK
5574
5575 while (--nr >= 0)
372e033b
RS
5576 reg_has_output_reload[nregno + nr] = 1;
5577
5578 if (i >= 0)
32131a9c 5579 {
372e033b
RS
5580 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5581 while (--nr >= 0)
32131a9c
RK
5582 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5583 }
5584
5585 if (reload_when_needed[r] != RELOAD_OTHER
546b63fb
RK
5586 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5587 && reload_when_needed[r] != RELOAD_FOR_INSN)
32131a9c
RK
5588 abort ();
5589 }
5590 }
5591}
5592\f
546b63fb
RK
5593/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5594 reloads of the same item for fear that we might not have enough reload
5595 registers. However, normally they will get the same reload register
5596 and hence actually need not be loaded twice.
5597
5598 Here we check for the most common case of this phenomenon: when we have
5599 a number of reloads for the same object, each of which were allocated
5600 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5601 reload, and is not modified in the insn itself. If we find such,
5602 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5603 This will not increase the number of spill registers needed and will
5604 prevent redundant code. */
5605
5606#ifdef SMALL_REGISTER_CLASSES
5607
5608static void
5609merge_assigned_reloads (insn)
5610 rtx insn;
5611{
5612 int i, j;
5613
5614 /* Scan all the reloads looking for ones that only load values and
5615 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5616 assigned and not modified by INSN. */
5617
5618 for (i = 0; i < n_reloads; i++)
5619 {
5620 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5621 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5622 || reg_set_p (reload_reg_rtx[i], insn))
5623 continue;
5624
5625 /* Look at all other reloads. Ensure that the only use of this
5626 reload_reg_rtx is in a reload that just loads the same value
5627 as we do. Note that any secondary reloads must be of the identical
5628 class since the values, modes, and result registers are the
5629 same, so we need not do anything with any secondary reloads. */
5630
5631 for (j = 0; j < n_reloads; j++)
5632 {
5633 if (i == j || reload_reg_rtx[j] == 0
5634 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5635 reload_reg_rtx[i]))
5636 continue;
5637
5638 /* If the reload regs aren't exactly the same (e.g, different modes)
5639 or if the values are different, we can't merge anything with this
5640 reload register. */
5641
5642 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5643 || reload_out[j] != 0 || reload_in[j] == 0
5644 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5645 break;
5646 }
5647
5648 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5649 we, in fact, found any matching reloads. */
5650
5651 if (j == n_reloads)
5652 {
5653 for (j = 0; j < n_reloads; j++)
5654 if (i != j && reload_reg_rtx[j] != 0
5655 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5656 {
5657 reload_when_needed[i] = RELOAD_OTHER;
5658 reload_in[j] = 0;
5659 transfer_replacements (i, j);
5660 }
5661
5662 /* If this is now RELOAD_OTHER, look for any reloads that load
5663 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5664 if they were for inputs, RELOAD_OTHER for outputs. Note that
5665 this test is equivalent to looking for reloads for this operand
5666 number. */
5667
5668 if (reload_when_needed[i] == RELOAD_OTHER)
5669 for (j = 0; j < n_reloads; j++)
5670 if (reload_in[j] != 0
5671 && reload_when_needed[j] != RELOAD_OTHER
5672 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5673 reload_in[i]))
5674 reload_when_needed[j]
5675 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5676 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5677 }
5678 }
5679}
5680#endif /* SMALL_REGISTER_CLASSES */
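/* For example: if an insn uses the same pseudo as two separate input
   operands and SMALL_REGISTER_CLASSES kept the two RELOAD_FOR_INPUT reloads
   separate, they will normally still be given the same reload register;
   merge_assigned_reloads then collapses them into a single RELOAD_OTHER
   reload so the value is loaded only once.  */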
5681\f
32131a9c
RK
5682/* Output insns to reload values in and out of the chosen reload regs. */
5683
5684static void
5685emit_reload_insns (insn)
5686 rtx insn;
5687{
5688 register int j;
546b63fb
RK
5689 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5690 rtx other_input_address_reload_insns = 0;
5691 rtx other_input_reload_insns = 0;
5692 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5693 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5694 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5695 rtx operand_reload_insns = 0;
893bc853 5696 rtx other_operand_reload_insns = 0;
d7e0324f 5697 rtx other_output_reload_insns = 0;
32131a9c 5698 rtx following_insn = NEXT_INSN (insn);
a8efe40d 5699 rtx before_insn = insn;
32131a9c
RK
5700 int special;
5701 /* Values to be put in spill_reg_store are put here first. */
5702 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5703
546b63fb
RK
5704 for (j = 0; j < reload_n_operands; j++)
5705 input_reload_insns[j] = input_address_reload_insns[j]
5706 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5707
32131a9c
RK
5708 /* Now output the instructions to copy the data into and out of the
5709 reload registers. Do these in the order that the reloads were reported,
5710 since reloads of base and index registers precede reloads of operands
5711 and the operands may need the base and index registers reloaded. */
5712
5713 for (j = 0; j < n_reloads; j++)
5714 {
5715 register rtx old;
5716 rtx oldequiv_reg = 0;
73b2ad9e
RK
5717
5718 if (reload_spill_index[j] >= 0)
5719 new_spill_reg_store[reload_spill_index[j]] = 0;
32131a9c
RK
5720
5721 old = reload_in[j];
5722 if (old != 0 && ! reload_inherited[j]
5723 && ! rtx_equal_p (reload_reg_rtx[j], old)
5724 && reload_reg_rtx[j] != 0)
5725 {
5726 register rtx reloadreg = reload_reg_rtx[j];
5727 rtx oldequiv = 0;
5728 enum machine_mode mode;
546b63fb 5729 rtx *where;
32131a9c
RK
5730
5731 /* Determine the mode to reload in.
5732 This is very tricky because we have three to choose from.
5733 There is the mode the insn operand wants (reload_inmode[J]).
5734 There is the mode of the reload register RELOADREG.
5735 There is the intrinsic mode of the operand, which we could find
5736 by stripping some SUBREGs.
5737 It turns out that RELOADREG's mode is irrelevant:
5738 we can change that arbitrarily.
5739
5740 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5741 then the reload reg may not support QImode moves, so use SImode.
5742 If foo is in memory due to spilling a pseudo reg, this is safe,
5743 because the QImode value is in the least significant part of a
5744 slot big enough for a SImode. If foo is some other sort of
5745 memory reference, then it is impossible to reload this case,
5746 so previous passes had better make sure this never happens.
5747
5748 Then consider a one-word union which has SImode and one of its
5749 members is a float, being fetched as (SUBREG:SF union:SI).
5750 We must fetch that as SFmode because we could be loading into
5751 a float-only register. In this case OLD's mode is correct.
5752
5753 Consider an immediate integer: it has VOIDmode. Here we need
5754 to get a mode from something else.
5755
5756 In some cases, there is a fourth mode, the operand's
5757 containing mode. If the insn specifies a containing mode for
5758 this operand, it overrides all others.
5759
5760 I am not sure whether the algorithm here is always right,
5761 but it does the right things in those cases. */
5762
5763 mode = GET_MODE (old);
5764 if (mode == VOIDmode)
5765 mode = reload_inmode[j];
32131a9c
RK
5766
5767#ifdef SECONDARY_INPUT_RELOAD_CLASS
5768 /* If we need a secondary register for this operation, see if
5769 the value is already in a register in that class. Don't
5770 do this if the secondary register will be used as a scratch
5771 register. */
5772
b80bba27
RK
5773 if (reload_secondary_in_reload[j] >= 0
5774 && reload_secondary_in_icode[j] == CODE_FOR_nothing
58b1581b 5775 && optimize)
32131a9c
RK
5776 oldequiv
5777 = find_equiv_reg (old, insn,
b80bba27 5778 reload_reg_class[reload_secondary_in_reload[j]],
fb3821f7 5779 -1, NULL_PTR, 0, mode);
32131a9c
RK
5780#endif
5781
5782 /* If reloading from memory, see if there is a register
5783 that already holds the same value. If so, reload from there.
5784 We can pass 0 as the reload_reg_p argument because
5785 any other reload has either already been emitted,
5786 in which case find_equiv_reg will see the reload-insn,
5787 or has yet to be emitted, in which case it doesn't matter
5788 because we will use this equiv reg right away. */
5789
58b1581b 5790 if (oldequiv == 0 && optimize
32131a9c
RK
5791 && (GET_CODE (old) == MEM
5792 || (GET_CODE (old) == REG
5793 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5794 && reg_renumber[REGNO (old)] < 0)))
546b63fb 5795 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
fb3821f7 5796 -1, NULL_PTR, 0, mode);
32131a9c
RK
5797
5798 if (oldequiv)
5799 {
5800 int regno = true_regnum (oldequiv);
5801
5802 /* If OLDEQUIV is a spill register, don't use it for this
5803 if any other reload needs it at an earlier stage of this insn
a8fdc208 5804 or at this stage. */
32131a9c 5805 if (spill_reg_order[regno] >= 0
546b63fb
RK
5806 && (! reload_reg_free_p (regno, reload_opnum[j],
5807 reload_when_needed[j])
5808 || ! reload_reg_free_before_p (regno, reload_opnum[j],
32131a9c
RK
5809 reload_when_needed[j])))
5810 oldequiv = 0;
5811
5812 /* If OLDEQUIV is not a spill register,
5813 don't use it if any other reload wants it. */
5814 if (spill_reg_order[regno] < 0)
5815 {
5816 int k;
5817 for (k = 0; k < n_reloads; k++)
5818 if (reload_reg_rtx[k] != 0 && k != j
bfa30b22
RK
5819 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5820 oldequiv))
32131a9c
RK
5821 {
5822 oldequiv = 0;
5823 break;
5824 }
5825 }
546b63fb
RK
5826
5827 /* If it is no cheaper to copy from OLDEQUIV into the
5828 reload register than it would be to move from memory,
5829 don't use it. Likewise, if we need a secondary register
5830 or memory. */
5831
5832 if (oldequiv != 0
5833 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5834 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5835 reload_reg_class[j])
5836 >= MEMORY_MOVE_COST (mode)))
5837#ifdef SECONDARY_INPUT_RELOAD_CLASS
5838 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5839 mode, oldequiv)
5840 != NO_REGS)
5841#endif
5842#ifdef SECONDARY_MEMORY_NEEDED
5843 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5844 REGNO_REG_CLASS (regno),
5845 mode)
5846#endif
5847 ))
5848 oldequiv = 0;
32131a9c
RK
5849 }
5850
5851 if (oldequiv == 0)
5852 oldequiv = old;
5853 else if (GET_CODE (oldequiv) == REG)
5854 oldequiv_reg = oldequiv;
5855 else if (GET_CODE (oldequiv) == SUBREG)
5856 oldequiv_reg = SUBREG_REG (oldequiv);
5857
76182796
RK
5858 /* If we are reloading from a register that was recently stored in
5859 with an output-reload, see if we can prove there was
5860 actually no need to store the old value in it. */
5861
5862 if (optimize && GET_CODE (oldequiv) == REG
5863 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5864 && spill_reg_order[REGNO (oldequiv)] >= 0
c95c0732 5865 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
8aea655f 5866 && find_reg_note (insn, REG_DEAD, reload_in[j])
76182796 5867 /* This is unsafe if operand occurs more than once in current
b87b7ecd 5868 insn. Perhaps some occurrences weren't reloaded. */
c95c0732 5869 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
76182796
RK
5870 delete_output_reload
5871 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5872
32131a9c 5873 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
3abe6f90
RK
5874 then load RELOADREG from OLDEQUIV. Note that we cannot use
5875 gen_lowpart_common since it can do the wrong thing when
5876 RELOADREG has a multi-word mode. Note that RELOADREG
5877 must always be a REG here. */
32131a9c
RK
5878
5879 if (GET_MODE (reloadreg) != mode)
3abe6f90 5880 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
5881 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5882 oldequiv = SUBREG_REG (oldequiv);
5883 if (GET_MODE (oldequiv) != VOIDmode
5884 && mode != GET_MODE (oldequiv))
5885 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5886
546b63fb 5887 /* Switch to the right place to emit the reload insns. */
32131a9c
RK
5888 switch (reload_when_needed[j])
5889 {
32131a9c 5890 case RELOAD_OTHER:
546b63fb
RK
5891 where = &other_input_reload_insns;
5892 break;
5893 case RELOAD_FOR_INPUT:
5894 where = &input_reload_insns[reload_opnum[j]];
32131a9c 5895 break;
546b63fb
RK
5896 case RELOAD_FOR_INPUT_ADDRESS:
5897 where = &input_address_reload_insns[reload_opnum[j]];
32131a9c 5898 break;
546b63fb
RK
5899 case RELOAD_FOR_OUTPUT_ADDRESS:
5900 where = &output_address_reload_insns[reload_opnum[j]];
32131a9c
RK
5901 break;
5902 case RELOAD_FOR_OPERAND_ADDRESS:
546b63fb
RK
5903 where = &operand_reload_insns;
5904 break;
893bc853
RK
5905 case RELOAD_FOR_OPADDR_ADDR:
5906 where = &other_operand_reload_insns;
5907 break;
546b63fb
RK
5908 case RELOAD_FOR_OTHER_ADDRESS:
5909 where = &other_input_address_reload_insns;
5910 break;
5911 default:
5912 abort ();
32131a9c
RK
5913 }
5914
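	  /* Accumulate this reload's insns on the sequence selected above;
	     the per-type sequences are emitted around INSN in the proper
	     order once all reloads have been processed.  */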
546b63fb 5915 push_to_sequence (*where);
32131a9c
RK
5916 special = 0;
5917
5918 /* Auto-increment addresses must be reloaded in a special way. */
5919 if (GET_CODE (oldequiv) == POST_INC
5920 || GET_CODE (oldequiv) == POST_DEC
5921 || GET_CODE (oldequiv) == PRE_INC
5922 || GET_CODE (oldequiv) == PRE_DEC)
5923 {
5924 /* We are not going to bother supporting the case where an
5925 incremented register can't be copied directly from
5926 OLDEQUIV since this seems highly unlikely. */
b80bba27 5927 if (reload_secondary_in_reload[j] >= 0)
32131a9c
RK
5928 abort ();
5929 /* Prevent normal processing of this reload. */
5930 special = 1;
5931 /* Output a special code sequence for this case. */
546b63fb 5932 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
32131a9c
RK
5933 }
5934
5935 /* If we are reloading a pseudo-register that was set by the previous
5936 insn, see if we can get rid of that pseudo-register entirely
5937 by redirecting the previous insn into our reload register. */
5938
5939 else if (optimize && GET_CODE (old) == REG
5940 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5941 && dead_or_set_p (insn, old)
5942 /* This is unsafe if some other reload
5943 uses the same reg first. */
546b63fb
RK
5944 && reload_reg_free_before_p (REGNO (reloadreg),
5945 reload_opnum[j],
5946 reload_when_needed[j]))
32131a9c
RK
5947 {
5948 rtx temp = PREV_INSN (insn);
5949 while (temp && GET_CODE (temp) == NOTE)
5950 temp = PREV_INSN (temp);
5951 if (temp
5952 && GET_CODE (temp) == INSN
5953 && GET_CODE (PATTERN (temp)) == SET
5954 && SET_DEST (PATTERN (temp)) == old
5955 /* Make sure we can access insn_operand_constraint. */
5956 && asm_noperands (PATTERN (temp)) < 0
5957 /* This is unsafe if prev insn rejects our reload reg. */
5958 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5959 reloadreg)
5960 /* This is unsafe if operand occurs more than once in current
5961 insn. Perhaps some occurrences aren't reloaded. */
5962 && count_occurrences (PATTERN (insn), old) == 1
5963 /* Don't risk splitting a matching pair of operands. */
5964 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5965 {
5966 /* Store into the reload register instead of the pseudo. */
5967 SET_DEST (PATTERN (temp)) = reloadreg;
5968 /* If these are the only uses of the pseudo reg,
5969 pretend for GDB it lives in the reload reg we used. */
5970 if (reg_n_deaths[REGNO (old)] == 1
5971 && reg_n_sets[REGNO (old)] == 1)
5972 {
5973 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5974 alter_reg (REGNO (old), -1);
5975 }
5976 special = 1;
5977 }
5978 }
5979
546b63fb
RK
5980 /* We can't do that, so output an insn to load RELOADREG. */
5981
32131a9c
RK
5982 if (! special)
5983 {
5984#ifdef SECONDARY_INPUT_RELOAD_CLASS
5985 rtx second_reload_reg = 0;
5986 enum insn_code icode;
5987
5988 /* If we have a secondary reload, pick up the secondary register
d445b551
RK
5989 and icode, if any. If OLDEQUIV and OLD are different or
5990 if this is an in-out reload, recompute whether or not we
5991 still need a secondary register and what the icode should
5992 be. If we still need a secondary register and the class or
5993 icode is different, go back to reloading from OLD if using
5994 OLDEQUIV means that we got the wrong type of register. We
5995 cannot have different class or icode due to an in-out reload
5996 because we don't make such reloads when both the input and
5997 output need secondary reload registers. */
32131a9c 5998
b80bba27 5999 if (reload_secondary_in_reload[j] >= 0)
32131a9c 6000 {
b80bba27 6001 int secondary_reload = reload_secondary_in_reload[j];
1554c2c6
RK
6002 rtx real_oldequiv = oldequiv;
6003 rtx real_old = old;
6004
6005 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6006 and similarly for OLD.
b80bba27 6007 See comments in get_secondary_reload in reload.c. */
1554c2c6
RK
6008 if (GET_CODE (oldequiv) == REG
6009 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6010 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6011 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6012
6013 if (GET_CODE (old) == REG
6014 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6015 && reg_equiv_mem[REGNO (old)] != 0)
6016 real_old = reg_equiv_mem[REGNO (old)];
6017
32131a9c 6018 second_reload_reg = reload_reg_rtx[secondary_reload];
b80bba27 6019 icode = reload_secondary_in_icode[j];
32131a9c 6020
d445b551
RK
6021 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6022 || (reload_in[j] != 0 && reload_out[j] != 0))
32131a9c
RK
6023 {
6024 enum reg_class new_class
6025 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
1554c2c6 6026 mode, real_oldequiv);
32131a9c
RK
6027
6028 if (new_class == NO_REGS)
6029 second_reload_reg = 0;
6030 else
6031 {
6032 enum insn_code new_icode;
6033 enum machine_mode new_mode;
6034
6035 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6036 REGNO (second_reload_reg)))
1554c2c6 6037 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6038 else
6039 {
6040 new_icode = reload_in_optab[(int) mode];
6041 if (new_icode != CODE_FOR_nothing
6042 && ((insn_operand_predicate[(int) new_icode][0]
a8fdc208 6043 && ! ((*insn_operand_predicate[(int) new_icode][0])
32131a9c 6044 (reloadreg, mode)))
a8fdc208
RS
6045 || (insn_operand_predicate[(int) new_icode][1]
6046 && ! ((*insn_operand_predicate[(int) new_icode][1])
1554c2c6 6047 (real_oldequiv, mode)))))
32131a9c
RK
6048 new_icode = CODE_FOR_nothing;
6049
6050 if (new_icode == CODE_FOR_nothing)
6051 new_mode = mode;
6052 else
196ddf8a 6053 new_mode = insn_operand_mode[(int) new_icode][2];
32131a9c
RK
6054
6055 if (GET_MODE (second_reload_reg) != new_mode)
6056 {
6057 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6058 new_mode))
1554c2c6 6059 oldequiv = old, real_oldequiv = real_old;
32131a9c
RK
6060 else
6061 second_reload_reg
3aaa90c7
MM
6062 = gen_rtx (REG, new_mode,
6063 REGNO (second_reload_reg));
32131a9c
RK
6064 }
6065 }
6066 }
6067 }
6068
6069 /* If we still need a secondary reload register, check
6070 to see if it is being used as a scratch or intermediate
1554c2c6
RK
6071 register and generate code appropriately. If we need
6072 a scratch register, use REAL_OLDEQUIV since the form of
6073 the insn may depend on the actual address if it is
6074 a MEM. */
32131a9c
RK
6075
6076 if (second_reload_reg)
6077 {
6078 if (icode != CODE_FOR_nothing)
6079 {
5e03c156
RK
6080 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6081 second_reload_reg));
32131a9c
RK
6082 special = 1;
6083 }
6084 else
6085 {
6086 /* See if we need a scratch register to load the
6087 intermediate register (a tertiary reload). */
6088 enum insn_code tertiary_icode
b80bba27 6089 = reload_secondary_in_icode[secondary_reload];
32131a9c
RK
6090
6091 if (tertiary_icode != CODE_FOR_nothing)
6092 {
6093 rtx third_reload_reg
b80bba27 6094 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
32131a9c 6095
546b63fb
RK
6096 emit_insn ((GEN_FCN (tertiary_icode)
6097 (second_reload_reg, real_oldequiv,
6098 third_reload_reg)));
32131a9c
RK
6099 }
6100 else
5e03c156
RK
6101 gen_reload (second_reload_reg, oldequiv,
6102 reload_opnum[j],
6103 reload_when_needed[j]);
546b63fb
RK
6104
6105 oldequiv = second_reload_reg;
32131a9c
RK
6106 }
6107 }
6108 }
6109#endif
6110
2d182c6f 6111 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
5e03c156
RK
6112 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6113 reload_when_needed[j]);
32131a9c
RK
6114
6115#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6116 /* We may have to make a REG_DEAD note for the secondary reload
6117 register in the insns we just made. Find the last insn that
6118 mentioned the register. */
6119 if (! special && second_reload_reg
6120 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6121 {
6122 rtx prev;
6123
546b63fb 6124 for (prev = get_last_insn (); prev;
32131a9c
RK
6125 prev = PREV_INSN (prev))
6126 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
bfa30b22
RK
6127 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6128 PATTERN (prev)))
32131a9c
RK
6129 {
6130 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6131 second_reload_reg,
6132 REG_NOTES (prev));
6133 break;
6134 }
6135 }
6136#endif
6137 }
6138
546b63fb
RK
6139 /* End this sequence. */
6140 *where = get_insns ();
6141 end_sequence ();
32131a9c
RK
6142 }
6143
6144 /* Add a note saying the input reload reg
6145 dies in this insn, if anyone cares. */
6146#ifdef PRESERVE_DEATH_INFO_REGNO_P
6147 if (old != 0
6148 && reload_reg_rtx[j] != old
6149 && reload_reg_rtx[j] != 0
6150 && reload_out[j] == 0
6151 && ! reload_inherited[j]
6152 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6153 {
6154 register rtx reloadreg = reload_reg_rtx[j];
6155
a8fdc208 6156#if 0
32131a9c
RK
6157 /* We can't abort here because we need to support this for sched.c.
6158 It's not terrible to miss a REG_DEAD note, but we should try
6159 to figure out how to do this correctly. */
6160 /* The code below is incorrect for address-only reloads. */
6161 if (reload_when_needed[j] != RELOAD_OTHER
6162 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6163 abort ();
6164#endif
6165
6166 /* Add a death note to this insn, for an input reload. */
6167
6168 if ((reload_when_needed[j] == RELOAD_OTHER
6169 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6170 && ! dead_or_set_p (insn, reloadreg))
6171 REG_NOTES (insn)
6172 = gen_rtx (EXPR_LIST, REG_DEAD,
6173 reloadreg, REG_NOTES (insn));
6174 }
6175
6176 /* When we inherit a reload, the last marked death of the reload reg
6177 may no longer really be a death. */
6178 if (reload_reg_rtx[j] != 0
6179 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6180 && reload_inherited[j])
6181 {
6182 /* Handle inheriting an output reload.
6183 Remove the death note from the output reload insn. */
6184 if (reload_spill_index[j] >= 0
6185 && GET_CODE (reload_in[j]) == REG
6186 && spill_reg_store[reload_spill_index[j]] != 0
6187 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6188 REG_DEAD, REGNO (reload_reg_rtx[j])))
6189 remove_death (REGNO (reload_reg_rtx[j]),
6190 spill_reg_store[reload_spill_index[j]]);
6191 /* Likewise for input reloads that were inherited. */
6192 else if (reload_spill_index[j] >= 0
6193 && GET_CODE (reload_in[j]) == REG
6194 && spill_reg_store[reload_spill_index[j]] == 0
6195 && reload_inheritance_insn[j] != 0
a8fdc208 6196 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
32131a9c
RK
6197 REGNO (reload_reg_rtx[j])))
6198 remove_death (REGNO (reload_reg_rtx[j]),
6199 reload_inheritance_insn[j]);
6200 else
6201 {
6202 rtx prev;
6203
6204 /* We got this register from find_equiv_reg.
6205 Search back for its last death note and get rid of it.
6206 But don't search back too far.
6207 Don't go past a place where this reg is set,
6208 since a death note before that remains valid. */
6209 for (prev = PREV_INSN (insn);
6210 prev && GET_CODE (prev) != CODE_LABEL;
6211 prev = PREV_INSN (prev))
6212 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6213 && dead_or_set_p (prev, reload_reg_rtx[j]))
6214 {
6215 if (find_regno_note (prev, REG_DEAD,
6216 REGNO (reload_reg_rtx[j])))
6217 remove_death (REGNO (reload_reg_rtx[j]), prev);
6218 break;
6219 }
6220 }
6221 }
6222
6223 /* We might have used find_equiv_reg above to choose an alternate
6224 place from which to reload. If so, and it died, we need to remove
6225 that death and move it to one of the insns we just made. */
6226
6227 if (oldequiv_reg != 0
6228 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6229 {
6230 rtx prev, prev1;
6231
6232 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6233 prev = PREV_INSN (prev))
6234 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6235 && dead_or_set_p (prev, oldequiv_reg))
6236 {
6237 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6238 {
6239 for (prev1 = this_reload_insn;
6240 prev1; prev1 = PREV_INSN (prev1))
6241 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
bfa30b22
RK
6242 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6243 PATTERN (prev1)))
32131a9c
RK
6244 {
6245 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6246 oldequiv_reg,
6247 REG_NOTES (prev1));
6248 break;
6249 }
6250 remove_death (REGNO (oldequiv_reg), prev);
6251 }
6252 break;
6253 }
6254 }
6255#endif
6256
6257 /* If we are reloading a register that was recently stored in with an
6258 output-reload, see if we can prove there was
6259 actually no need to store the old value in it. */
6260
6261 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
546b63fb 6262 && reload_in[j] != 0
32131a9c
RK
6263 && GET_CODE (reload_in[j]) == REG
6264#if 0
6265 /* There doesn't seem to be any reason to restrict this to pseudos
6266 and doing so loses in the case where we are copying from a
6267 register of the wrong class. */
6268 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6269#endif
6270 && spill_reg_store[reload_spill_index[j]] != 0
546b63fb
RK
6271 /* This is unsafe if some other reload uses the same reg first. */
6272 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6273 reload_opnum[j], reload_when_needed[j])
32131a9c
RK
6274 && dead_or_set_p (insn, reload_in[j])
6275 /* This is unsafe if operand occurs more than once in current
6276 insn. Perhaps some occurrences weren't reloaded. */
6277 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6278 delete_output_reload (insn, j,
6279 spill_reg_store[reload_spill_index[j]]);
6280
6281 /* Input-reloading is done. Now do output-reloading,
6282 storing the value from the reload-register after the main insn
6283 if reload_out[j] is nonzero.
6284
6285 ??? At some point we need to support handling output reloads of
6286 JUMP_INSNs or insns that set cc0. */
6287 old = reload_out[j];
6288 if (old != 0
6289 && reload_reg_rtx[j] != old
6290 && reload_reg_rtx[j] != 0)
6291 {
6292 register rtx reloadreg = reload_reg_rtx[j];
6293 register rtx second_reloadreg = 0;
32131a9c
RK
6294 rtx note, p;
6295 enum machine_mode mode;
6296 int special = 0;
6297
6298 /* An output operand that dies right away does need a reload,
6299 but need not be copied from it. Show the new location in the
6300 REG_UNUSED note. */
6301 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6302 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6303 {
6304 XEXP (note, 0) = reload_reg_rtx[j];
6305 continue;
6306 }
a7911cd2
RK
6307 /* Likewise for a SUBREG of an operand that dies. */
6308 else if (GET_CODE (old) == SUBREG
6309 && GET_CODE (SUBREG_REG (old)) == REG
6310 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6311 SUBREG_REG (old))))
6312 {
6313 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6314 reload_reg_rtx[j]);
6315 continue;
6316 }
32131a9c
RK
6317 else if (GET_CODE (old) == SCRATCH)
6318 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6319 but we don't want to make an output reload. */
6320 continue;
6321
6322#if 0
6323 /* Strip off OLD any size-increasing SUBREGs such as
6324 (SUBREG:SI foo:QI 0). */
6325
6326 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6327 && (GET_MODE_SIZE (GET_MODE (old))
6328 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6329 old = SUBREG_REG (old);
6330#endif
6331
6332 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6333 if (GET_CODE (insn) == JUMP_INSN)
6334 abort ();
6335
d7e0324f 6336 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf 6337 start_sequence ();
d7e0324f
RK
6338 else
6339 push_to_sequence (output_reload_insns[reload_opnum[j]]);
546b63fb 6340
32131a9c
RK
6341 /* Determine the mode to reload in.
6342 See comments above (for input reloading). */
6343
6344 mode = GET_MODE (old);
6345 if (mode == VOIDmode)
79a365a7
RS
6346 {
6347 /* VOIDmode should never happen for an output. */
6348 if (asm_noperands (PATTERN (insn)) < 0)
6349 /* It's the compiler's fault. */
a89b2cc4 6350 fatal_insn ("VOIDmode on an output", insn);
79a365a7
RS
6351 error_for_asm (insn, "output operand is constant in `asm'");
6352 /* Prevent crash--use something we know is valid. */
6353 mode = word_mode;
6354 old = gen_rtx (REG, mode, REGNO (reloadreg));
6355 }
32131a9c 6356
32131a9c 6357 if (GET_MODE (reloadreg) != mode)
3abe6f90 6358 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
32131a9c
RK
6359
6360#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6361
6362 /* If we need two reload regs, set RELOADREG to the intermediate
5e03c156 6363 one, since it will be stored into OLD. We might need a secondary
32131a9c
RK
6364 register only for an input reload, so check again here. */
6365
b80bba27 6366 if (reload_secondary_out_reload[j] >= 0)
32131a9c 6367 {
1554c2c6 6368 rtx real_old = old;
32131a9c 6369
1554c2c6
RK
6370 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6371 && reg_equiv_mem[REGNO (old)] != 0)
6372 real_old = reg_equiv_mem[REGNO (old)];
32131a9c 6373
1554c2c6
RK
6374 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6375 mode, real_old)
6376 != NO_REGS))
6377 {
6378 second_reloadreg = reloadreg;
b80bba27 6379 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
32131a9c 6380
1554c2c6
RK
6381 /* See if RELOADREG is to be used as a scratch register
6382 or as an intermediate register. */
b80bba27 6383 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
32131a9c 6384 {
b80bba27 6385 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
546b63fb 6386 (real_old, second_reloadreg, reloadreg)));
1554c2c6 6387 special = 1;
32131a9c
RK
6388 }
6389 else
1554c2c6
RK
6390 {
6391 /* See if we need both a scratch and intermediate reload
6392 register. */
5e03c156 6393
b80bba27 6394 int secondary_reload = reload_secondary_out_reload[j];
1554c2c6 6395 enum insn_code tertiary_icode
b80bba27 6396 = reload_secondary_out_icode[secondary_reload];
32131a9c 6397
1554c2c6
RK
6398 if (GET_MODE (reloadreg) != mode)
6399 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6400
6401 if (tertiary_icode != CODE_FOR_nothing)
6402 {
6403 rtx third_reloadreg
b80bba27 6404 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
a7911cd2 6405 rtx tem;
5e03c156
RK
6406
6407 /* Copy primary reload reg to secondary reload reg.
6408 (Note that these have been swapped above.)  Then copy the
6409 secondary reload reg to OLD using our insn. */
6410
a7911cd2
RK
6411 /* If REAL_OLD is a paradoxical SUBREG, remove it
6412 and try to put the opposite SUBREG on
6413 RELOADREG. */
6414 if (GET_CODE (real_old) == SUBREG
6415 && (GET_MODE_SIZE (GET_MODE (real_old))
6416 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6417 && 0 != (tem = gen_lowpart_common
6418 (GET_MODE (SUBREG_REG (real_old)),
6419 reloadreg)))
6420 real_old = SUBREG_REG (real_old), reloadreg = tem;
6421
5e03c156
RK
6422 gen_reload (reloadreg, second_reloadreg,
6423 reload_opnum[j], reload_when_needed[j]);
6424 emit_insn ((GEN_FCN (tertiary_icode)
6425 (real_old, reloadreg, third_reloadreg)));
6426 special = 1;
9ad5f9f6 6427 }
5e03c156 6428
1554c2c6 6429 else
5e03c156
RK
6430 /* Copy between the reload regs here and then to
6431 OUT later. */
1554c2c6 6432
5e03c156
RK
6433 gen_reload (reloadreg, second_reloadreg,
6434 reload_opnum[j], reload_when_needed[j]);
1554c2c6 6435 }
32131a9c
RK
6436 }
6437 }
6438#endif
6439
6440 /* Output the last reload insn. */
6441 if (! special)
5e03c156
RK
6442 gen_reload (old, reloadreg, reload_opnum[j],
6443 reload_when_needed[j]);
32131a9c
RK
6444
6445#ifdef PRESERVE_DEATH_INFO_REGNO_P
6446 /* If final will look at death notes for this reg,
6447 put one on the last output-reload insn to use it. Similarly
6448 for any secondary register. */
6449 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
546b63fb 6450 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6451 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6452 && reg_overlap_mentioned_for_reload_p (reloadreg,
6453 PATTERN (p)))
32131a9c
RK
6454 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6455 reloadreg, REG_NOTES (p));
6456
6457#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6458 if (! special
6459 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
546b63fb 6460 for (p = get_last_insn (); p; p = PREV_INSN (p))
32131a9c 6461 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
bfa30b22
RK
6462 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6463 PATTERN (p)))
32131a9c
RK
6464 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6465 second_reloadreg, REG_NOTES (p));
6466#endif
6467#endif
6468 /* Look at all insns we emitted, just to be safe. */
546b63fb 6469 for (p = get_insns (); p; p = NEXT_INSN (p))
32131a9c
RK
6470 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6471 {
6472 /* If this output reload doesn't come from a spill reg,
6473 clear any memory of reloaded copies of the pseudo reg.
6474 If this output reload comes from a spill reg,
6475 reg_has_output_reload will make this do nothing. */
6476 note_stores (PATTERN (p), forget_old_reloads_1);
6477
73b2ad9e
RK
6478 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6479 && reload_spill_index[j] >= 0)
6480 new_spill_reg_store[reload_spill_index[j]] = p;
32131a9c
RK
6481 }
6482
d7e0324f 6483 if (reload_when_needed[j] == RELOAD_OTHER)
5ca582cf
RK
6484 {
6485 if (other_output_reload_insns)
6486 emit_insns (other_output_reload_insns);
6487 other_output_reload_insns = get_insns ();
6488 }
d7e0324f
RK
6489 else
6490 output_reload_insns[reload_opnum[j]] = get_insns ();
6491
546b63fb 6492 end_sequence ();
32131a9c 6493 }
32131a9c
RK
6494 }
6495
546b63fb
RK
6496 /* Now write all the insns we made for reloads in the order expected by
6497 the allocation functions. Prior to the insn being reloaded, we write
6498 the following reloads:
6499
6500 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6501
5ca582cf 6502 RELOAD_OTHER reloads, output in ascending order by reload number.
546b63fb
RK
6503
6504 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6505 the RELOAD_FOR_INPUT reload for the operand.
6506
893bc853
RK
6507 RELOAD_FOR_OPADDR_ADDRS reloads.
6508
546b63fb
RK
6509 RELOAD_FOR_OPERAND_ADDRESS reloads.
6510
6511 After the insn being reloaded, we write the following:
6512
6513 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
d7e0324f
RK
6514 the RELOAD_FOR_OUTPUT reload for that operand.
6515
5ca582cf
RK
6516 Any RELOAD_OTHER output reloads, output in descending order by
6517 reload number. */
546b63fb
RK
6518
6519 emit_insns_before (other_input_address_reload_insns, before_insn);
6520 emit_insns_before (other_input_reload_insns, before_insn);
6521
6522 for (j = 0; j < reload_n_operands; j++)
6523 {
6524 emit_insns_before (input_address_reload_insns[j], before_insn);
6525 emit_insns_before (input_reload_insns[j], before_insn);
6526 }
6527
893bc853 6528 emit_insns_before (other_operand_reload_insns, before_insn);
546b63fb
RK
6529 emit_insns_before (operand_reload_insns, before_insn);
6530
6531 for (j = 0; j < reload_n_operands; j++)
6532 {
6533 emit_insns_before (output_address_reload_insns[j], following_insn);
6534 emit_insns_before (output_reload_insns[j], following_insn);
6535 }
6536
d7e0324f
RK
6537 emit_insns_before (other_output_reload_insns, following_insn);
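  /* A minimal illustration: for a hypothetical insn with one reloaded
     input operand and one reloaded output operand, the calls above leave
     the insn stream ordered as

	RELOAD_FOR_OTHER_ADDRESS and RELOAD_OTHER input reloads
	RELOAD_FOR_INPUT_ADDRESS, then RELOAD_FOR_INPUT, for the input operand
	RELOAD_FOR_OPADDR_ADDRS and RELOAD_FOR_OPERAND_ADDRESS reloads
	the insn being reloaded
	RELOAD_FOR_OUTPUT_ADDRESS, then RELOAD_FOR_OUTPUT, for the output operand
	RELOAD_OTHER output reloads

     that is, everything emitted before BEFORE_INSN feeds the insn and
     everything emitted before FOLLOWING_INSN stores its results.  */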
6538
32131a9c
RK
6539 /* Move death notes from INSN
6540 to output-operand-address and output reload insns. */
6541#ifdef PRESERVE_DEATH_INFO_REGNO_P
6542 {
6543 rtx insn1;
6544 /* Loop over those insns, last ones first. */
6545 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6546 insn1 = PREV_INSN (insn1))
6547 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6548 {
6549 rtx source = SET_SRC (PATTERN (insn1));
6550 rtx dest = SET_DEST (PATTERN (insn1));
6551
6552 /* The note we will examine next. */
6553 rtx reg_notes = REG_NOTES (insn);
6554 /* The place that pointed to this note. */
6555 rtx *prev_reg_note = &REG_NOTES (insn);
6556
6557 /* If the note is for something used in the source of this
6558 reload insn, or in the output address, move the note. */
6559 while (reg_notes)
6560 {
6561 rtx next_reg_notes = XEXP (reg_notes, 1);
6562 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6563 && GET_CODE (XEXP (reg_notes, 0)) == REG
6564 && ((GET_CODE (dest) != REG
bfa30b22
RK
6565 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6566 dest))
6567 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6568 source)))
32131a9c
RK
6569 {
6570 *prev_reg_note = next_reg_notes;
6571 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6572 REG_NOTES (insn1) = reg_notes;
6573 }
6574 else
6575 prev_reg_note = &XEXP (reg_notes, 1);
6576
6577 reg_notes = next_reg_notes;
6578 }
6579 }
6580 }
6581#endif
6582
6583 /* For all the spill regs newly reloaded in this instruction,
6584 record what they were reloaded from, so subsequent instructions
d445b551
RK
6585 can inherit the reloads.
6586
6587 Update spill_reg_store for the reloads of this insn.
e9e79d69 6588 Copy the elements that were updated in the loop above. */
32131a9c
RK
6589
6590 for (j = 0; j < n_reloads; j++)
6591 {
6592 register int r = reload_order[j];
6593 register int i = reload_spill_index[r];
6594
6595 /* I is nonneg if this reload used one of the spill regs.
6596 If reload_reg_rtx[r] is 0, this is an optional reload
51f0c3b7 6597 that we opted to ignore. */
d445b551 6598
51f0c3b7 6599 if (i >= 0 && reload_reg_rtx[r] != 0)
32131a9c 6600 {
32131a9c
RK
6601 int nr
6602 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6603 int k;
51f0c3b7
JW
6604 int part_reaches_end = 0;
6605 int all_reaches_end = 1;
32131a9c 6606
51f0c3b7
JW
6607 /* For a multi register reload, we need to check if all or part
6608 of the value lives to the end. */
32131a9c
RK
6609 for (k = 0; k < nr; k++)
6610 {
51f0c3b7
JW
6611 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6612 reload_when_needed[r]))
6613 part_reaches_end = 1;
6614 else
6615 all_reaches_end = 0;
32131a9c
RK
6616 }
6617
51f0c3b7
JW
6618 /* Ignore reloads that don't reach the end of the insn in
6619 their entirety. */
6620 if (all_reaches_end)
32131a9c 6621 {
51f0c3b7
JW
6622 /* First, clear out memory of what used to be in this spill reg.
6623 If consecutive registers are used, clear them all. */
d08ea79f 6624
32131a9c
RK
6625 for (k = 0; k < nr; k++)
6626 {
51f0c3b7
JW
6627 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6628 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
32131a9c 6629 }
d08ea79f 6630
51f0c3b7
JW
6631 /* Maybe the spill reg contains a copy of reload_out. */
6632 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6633 {
6634 register int nregno = REGNO (reload_out[r]);
6635 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6636 : HARD_REGNO_NREGS (nregno,
6637 GET_MODE (reload_reg_rtx[r])));
6638
6639 spill_reg_store[i] = new_spill_reg_store[i];
6640 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6641
6642 /* If NREGNO is a hard register, it may occupy more than
6643 one register. If it does, say what is in the
6644 rest of the registers assuming that both registers
6645 agree on how many words the object takes. If not,
6646 invalidate the subsequent registers. */
6647
6648 if (nregno < FIRST_PSEUDO_REGISTER)
6649 for (k = 1; k < nnr; k++)
6650 reg_last_reload_reg[nregno + k]
6651 = (nr == nnr
6652 ? gen_rtx (REG,
6653 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6654 REGNO (reload_reg_rtx[r]) + k)
6655 : 0);
6656
6657 /* Now do the inverse operation. */
6658 for (k = 0; k < nr; k++)
6659 {
6660 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6661 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6662 ? nregno
6663 : nregno + k);
6664 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6665 }
6666 }
d08ea79f 6667
51f0c3b7
JW
6668 /* Maybe the spill reg contains a copy of reload_in. Only do
6669 something if there will not be an output reload for
6670 the register being reloaded. */
6671 else if (reload_out[r] == 0
6672 && reload_in[r] != 0
6673 && ((GET_CODE (reload_in[r]) == REG
6674 && ! reg_has_output_reload[REGNO (reload_in[r])])
6675 || (GET_CODE (reload_in_reg[r]) == REG
6676 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6677 {
6678 register int nregno;
6679 int nnr;
d445b551 6680
51f0c3b7
JW
6681 if (GET_CODE (reload_in[r]) == REG)
6682 nregno = REGNO (reload_in[r]);
6683 else
6684 nregno = REGNO (reload_in_reg[r]);
d08ea79f 6685
51f0c3b7
JW
6686 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6687 : HARD_REGNO_NREGS (nregno,
6688 GET_MODE (reload_reg_rtx[r])));
6689
6690 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6691
6692 if (nregno < FIRST_PSEUDO_REGISTER)
6693 for (k = 1; k < nnr; k++)
6694 reg_last_reload_reg[nregno + k]
6695 = (nr == nnr
6696 ? gen_rtx (REG,
6697 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6698 REGNO (reload_reg_rtx[r]) + k)
6699 : 0);
6700
6701 /* Unless we inherited this reload, show we haven't
6702 recently done a store. */
6703 if (! reload_inherited[r])
6704 spill_reg_store[i] = 0;
6705
6706 for (k = 0; k < nr; k++)
6707 {
6708 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6709 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6710 ? nregno
6711 : nregno + k);
6712 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6713 = insn;
6714 }
6715 }
6716 }
d445b551 6717
51f0c3b7
JW
6718 /* However, if part of the reload reaches the end, then we must
6719 invalidate the old info for the part that survives to the end. */
6720 else if (part_reaches_end)
6721 {
546b63fb 6722 for (k = 0; k < nr; k++)
51f0c3b7
JW
6723 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6724 reload_opnum[r],
6725 reload_when_needed[r]))
6726 {
6727 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6728 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6729 }
32131a9c
RK
6730 }
6731 }
6732
6733 /* The following if-statement was #if 0'd in 1.34 (or before...).
6734 It's reenabled in 1.35 because supposedly nothing else
6735 deals with this problem. */
6736
6737 /* If a register gets output-reloaded from a non-spill register,
6738 that invalidates any previous reloaded copy of it.
6739 But forget_old_reloads_1 won't get to see it, because
6740 it thinks only about the original insn. So invalidate it here. */
6741 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6742 {
6743 register int nregno = REGNO (reload_out[r]);
c7093272
RK
6744 if (nregno >= FIRST_PSEUDO_REGISTER)
6745 reg_last_reload_reg[nregno] = 0;
6746 else
6747 {
6748 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
36281332 6749
c7093272
RK
6750 while (num_regs-- > 0)
6751 reg_last_reload_reg[nregno + num_regs] = 0;
6752 }
32131a9c
RK
6753 }
6754 }
6755}
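/* A worked illustration of the bookkeeping above, using hypothetical
   numbers: suppose pseudo 42 was reloaded into spill reg 3 for this insn
   and there is no output reload of pseudo 42.  Then
   reg_last_reload_reg[42] is set to the REG rtx for hard reg 3,
   reg_reloaded_contents[spill_reg_order[3]] records 42, and
   reg_reloaded_insn[spill_reg_order[3]] records this insn, so a later
   insn that wants pseudo 42 may be able to inherit the value already
   sitting in hard reg 3 instead of reloading it again.  */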
6756\f
5e03c156
RK
6757/* Emit code to perform a reload from IN (which may be a reload register) to
6758 OUT (which may also be a reload register). IN or OUT is from operand
6759 OPNUM with reload type TYPE.
546b63fb 6760
3c3eeea6 6761 Returns first insn emitted. */
32131a9c
RK
6762
6763rtx
5e03c156
RK
6764gen_reload (out, in, opnum, type)
6765 rtx out;
32131a9c 6766 rtx in;
546b63fb
RK
6767 int opnum;
6768 enum reload_type type;
32131a9c 6769{
546b63fb 6770 rtx last = get_last_insn ();
7a5b18b0
RK
6771 rtx tem;
6772
6773 /* If IN is a paradoxical SUBREG, remove it and try to put the
6774 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
6775 if (GET_CODE (in) == SUBREG
6776 && (GET_MODE_SIZE (GET_MODE (in))
6777 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
6778 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
6779 in = SUBREG_REG (in), out = tem;
6780 else if (GET_CODE (out) == SUBREG
6781 && (GET_MODE_SIZE (GET_MODE (out))
6782 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
6783 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
6784 out = SUBREG_REG (out), in = tem;
32131a9c 6785
a8fdc208 6786 /* How to do this reload can get quite tricky. Normally, we are being
32131a9c
RK
6787 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6788 register that didn't get a hard register. In that case we can just
6789 call emit_move_insn.
6790
a7fd196c
JW
6791 We can also be asked to reload a PLUS that adds a register or a MEM to
6792 another register, constant or MEM. This can occur during frame pointer
6793 elimination and while reloading addresses. This case is handled by
6794 trying to emit a single insn to perform the add. If it is not valid,
6795 we use a two insn sequence.
32131a9c
RK
6796
6797 Finally, we could be called to handle an 'o' constraint by putting
6798 an address into a register. In that case, we first try to do this
6799 with a named pattern of "reload_load_address". If no such pattern
6800 exists, we just emit a SET insn and hope for the best (it will normally
6801 be valid on machines that use 'o').
6802
6803 This entire process is made complex by the fact that reload will never
6804 process the insns we generate here (so we must ensure that they
6805 will fit their constraints) and by the fact that parts of
6806 IN might be being reloaded separately and replaced with spill registers.
6807 Because of this, we are, in some sense, just guessing the right approach
6808 here. The one listed above seems to work.
6809
6810 ??? At some point, this whole thing needs to be rethought. */
6811
6812 if (GET_CODE (in) == PLUS
a7fd196c
JW
6813 && (GET_CODE (XEXP (in, 0)) == REG
6814 || GET_CODE (XEXP (in, 0)) == MEM)
6815 && (GET_CODE (XEXP (in, 1)) == REG
6816 || CONSTANT_P (XEXP (in, 1))
6817 || GET_CODE (XEXP (in, 1)) == MEM))
32131a9c 6818 {
a7fd196c
JW
6819 /* We need to compute the sum of a register or a MEM and another
6820 register, constant, or MEM, and put it into the reload
3002e160
JW
6821 register. The best possible way of doing this is if the machine
6822 has a three-operand ADD insn that accepts the required operands.
32131a9c
RK
6823
6824 The simplest approach is to try to generate such an insn and see if it
6825 is recognized and matches its constraints. If so, it can be used.
6826
6827 It might be better not to actually emit the insn unless it is valid,
0009eff2 6828 but we need to pass the insn as an operand to `recog' and
b36d7dd7 6829 `insn_extract' and it is simpler to emit and then delete the insn if
0009eff2 6830 not valid than to dummy things up. */
a8fdc208 6831
af929c62 6832 rtx op0, op1, tem, insn;
32131a9c 6833 int code;
a8fdc208 6834
af929c62
RK
6835 op0 = find_replacement (&XEXP (in, 0));
6836 op1 = find_replacement (&XEXP (in, 1));
6837
32131a9c
RK
6838 /* Since constraint checking is strict, commutativity won't be
6839 checked, so we need to do that here to avoid spurious failure
6840 if the add instruction is two-address and the second operand
6841 of the add is the same as the reload reg, which is frequently
6842 the case. If the insn would be A = B + A, rearrange it so
0f41302f 6843 it will be A = A + B as constrain_operands expects. */
a8fdc208 6844
32131a9c 6845 if (GET_CODE (XEXP (in, 1)) == REG
5e03c156 6846 && REGNO (out) == REGNO (XEXP (in, 1)))
af929c62
RK
6847 tem = op0, op0 = op1, op1 = tem;
6848
6849 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6850 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
32131a9c 6851
5e03c156 6852 insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
32131a9c
RK
6853 code = recog_memoized (insn);
6854
6855 if (code >= 0)
6856 {
6857 insn_extract (insn);
6858 /* We want constrain operands to treat this insn strictly in
6859 its validity determination, i.e., the way it would after reload
6860 has completed. */
6861 if (constrain_operands (code, 1))
6862 return insn;
6863 }
6864
546b63fb 6865 delete_insns_since (last);
32131a9c
RK
6866
6867 /* If that failed, we must use a conservative two-insn sequence.
6868 Use a move to copy the constant, MEM, or pseudo register to the reload
af929c62
RK
6869 register since "move" will be able to handle an arbitrary operand,
6870 unlike add which can't, in general. Then add the registers.
32131a9c
RK
6871
6872 If there is another way to do this for a specific machine, a
6873 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6874 we emit below. */
6875
af929c62
RK
6876 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6877 || (GET_CODE (op1) == REG
6878 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6879 tem = op0, op0 = op1, op1 = tem;
32131a9c 6880
5e03c156 6881 emit_insn (gen_move_insn (out, op0));
39b56c2a 6882
5e03c156 6883 /* If OP0 and OP1 are the same, we can use OUT for OP1.
39b56c2a
RK
6884 This fixes a problem on the 32K where the stack pointer cannot
6885 be used as an operand of an add insn. */
6886
6887 if (rtx_equal_p (op0, op1))
5e03c156 6888 op1 = out;
39b56c2a 6889
5e03c156 6890 insn = emit_insn (gen_add2_insn (out, op1));
c77c9766
RK
6891
6892 /* If that failed, copy the address register to the reload register.
0f41302f 6893 Then add the constant to the reload register. */
c77c9766
RK
6894
6895 code = recog_memoized (insn);
6896
6897 if (code >= 0)
6898 {
6899 insn_extract (insn);
6900 /* We want constrain operands to treat this insn strictly in
6901 its validity determination, i.e., the way it would after reload
6902 has completed. */
6903 if (constrain_operands (code, 1))
6904 return insn;
6905 }
6906
6907 delete_insns_since (last);
6908
5e03c156
RK
6909 emit_insn (gen_move_insn (out, op1));
6910 emit_insn (gen_add2_insn (out, op0));
32131a9c
RK
6911 }
6912
0dadecf6
RK
6913#ifdef SECONDARY_MEMORY_NEEDED
6914 /* If we need a memory location to do the move, do it that way. */
6915 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
5e03c156 6916 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
0dadecf6 6917 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
5e03c156
RK
6918 REGNO_REG_CLASS (REGNO (out)),
6919 GET_MODE (out)))
0dadecf6
RK
6920 {
6921 /* Get the memory to use and rewrite both registers to its mode. */
5e03c156 6922 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
0dadecf6 6923
5e03c156
RK
6924 if (GET_MODE (loc) != GET_MODE (out))
6925 out = gen_rtx (REG, GET_MODE (loc), REGNO (out));
0dadecf6
RK
6926
6927 if (GET_MODE (loc) != GET_MODE (in))
6928 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6929
546b63fb 6930 emit_insn (gen_move_insn (loc, in));
5e03c156 6931 emit_insn (gen_move_insn (out, loc));
0dadecf6
RK
6932 }
6933#endif
6934
32131a9c
RK
6935 /* If IN is a simple operand, use gen_move_insn. */
6936 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
5e03c156 6937 emit_insn (gen_move_insn (out, in));
32131a9c
RK
6938
6939#ifdef HAVE_reload_load_address
6940 else if (HAVE_reload_load_address)
5e03c156 6941 emit_insn (gen_reload_load_address (out, in));
32131a9c
RK
6942#endif
6943
5e03c156 6944 /* Otherwise, just write (set OUT IN) and hope for the best. */
32131a9c 6945 else
5e03c156 6946 emit_insn (gen_rtx (SET, VOIDmode, out, in));
32131a9c
RK
6947
6948 /* Return the first insn emitted.
546b63fb 6949 We can not just return get_last_insn, because there may have
32131a9c
RK
6950 been multiple instructions emitted. Also note that gen_move_insn may
6951 emit more than one insn itself, so we cannot assume that there is one
6952 insn emitted per emit_insn_before call. */
6953
546b63fb 6954 return last ? NEXT_INSN (last) : get_insns ();
32131a9c
RK
6955}
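/* A condensed sketch of the emit-then-verify idiom used by gen_reload
   above: emit a tentative insn, ask recog_memoized and constrain_operands
   (in strict mode) whether it is valid as it will stand after reload, and
   if not, delete everything emitted since LAST so the caller can fall back
   to a more conservative sequence.  The helper name sketch_try_set is
   hypothetical; the real code performs these steps inline.  */

static rtx
sketch_try_set (out, in)
     rtx out;
     rtx in;
{
  rtx last = get_last_insn ();
  rtx insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
  int code = recog_memoized (insn);

  if (code >= 0)
    {
      insn_extract (insn);
      /* Check the constraints the way they will apply after reload.  */
      if (constrain_operands (code, 1))
	return insn;
    }

  /* Not recognized, or the constraints failed: discard the tentative
     insn(s) and report failure.  */
  delete_insns_since (last);
  return 0;
}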
6956\f
6957/* Delete a previously made output-reload
6958 whose result we now believe is not needed.
6959 First we double-check.
6960
6961 INSN is the insn now being processed.
6962 OUTPUT_RELOAD_INSN is the insn of the output reload.
6963 J is the reload-number for this insn. */
6964
6965static void
6966delete_output_reload (insn, j, output_reload_insn)
6967 rtx insn;
6968 int j;
6969 rtx output_reload_insn;
6970{
6971 register rtx i1;
6972
6973 /* Get the raw pseudo-register referred to. */
6974
6975 rtx reg = reload_in[j];
6976 while (GET_CODE (reg) == SUBREG)
6977 reg = SUBREG_REG (reg);
6978
6979 /* If the pseudo-reg we are reloading is no longer referenced
6980 anywhere between the store into it and here,
6981 and no jumps or labels intervene, then the value can get
6982 here through the reload reg alone.
6983 Otherwise, give up--return. */
6984 for (i1 = NEXT_INSN (output_reload_insn);
6985 i1 != insn; i1 = NEXT_INSN (i1))
6986 {
6987 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6988 return;
6989 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6990 && reg_mentioned_p (reg, PATTERN (i1)))
6991 return;
6992 }
6993
208dffa5
RS
6994 if (cannot_omit_stores[REGNO (reg)])
6995 return;
6996
32131a9c
RK
6997 /* If this insn will store in the pseudo again,
6998 the previous store can be removed. */
6999 if (reload_out[j] == reload_in[j])
7000 delete_insn (output_reload_insn);
7001
7002 /* See if the pseudo reg has been completely replaced
7003 with reload regs. If so, delete the store insn
7004 and forget we had a stack slot for the pseudo. */
7005 else if (reg_n_deaths[REGNO (reg)] == 1
7006 && reg_basic_block[REGNO (reg)] >= 0
7007 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7008 {
7009 rtx i2;
7010
7011 /* We know that it was used only between here
7012 and the beginning of the current basic block.
7013 (We also know that the last use before INSN was
7014 the output reload we are thinking of deleting, but never mind that.)
7015 Search that range; see if any ref remains. */
7016 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7017 {
d445b551
RK
7018 rtx set = single_set (i2);
7019
32131a9c
RK
7020 /* Uses which just store in the pseudo don't count,
7021 since if they are the only uses, they are dead. */
d445b551 7022 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
7023 continue;
7024 if (GET_CODE (i2) == CODE_LABEL
7025 || GET_CODE (i2) == JUMP_INSN)
7026 break;
7027 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7028 && reg_mentioned_p (reg, PATTERN (i2)))
7029 /* Some other ref remains;
7030 we can't do anything. */
7031 return;
7032 }
7033
7034 /* Delete the now-dead stores into this pseudo. */
7035 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7036 {
d445b551
RK
7037 rtx set = single_set (i2);
7038
7039 if (set != 0 && SET_DEST (set) == reg)
32131a9c
RK
7040 delete_insn (i2);
7041 if (GET_CODE (i2) == CODE_LABEL
7042 || GET_CODE (i2) == JUMP_INSN)
7043 break;
7044 }
7045
7046 /* For the debugging info,
7047 say the pseudo lives in this reload reg. */
7048 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7049 alter_reg (REGNO (reg), -1);
7050 }
7051}
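/* Illustrative case for the optimization above, with hypothetical numbers:
   if pseudo 42 is used in only one basic block, dies in INSN, and every
   reference between the output-reload store and INSN goes through the
   reload register, then the store into pseudo 42 (and any now-dead stores
   before it) is deleted, and the debugging info simply says pseudo 42
   lives in the reload register.  */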
32131a9c 7052\f
a8fdc208 7053/* Output reload-insns to reload VALUE into RELOADREG.
858a47b1 7054 VALUE is an autoincrement or autodecrement RTX whose operand
32131a9c
RK
7055 is a register or memory location;
7056 so reloading involves incrementing that location.
7057
7058 INC_AMOUNT is the number to increment or decrement by (always positive).
546b63fb 7059 This cannot be deduced from VALUE. */
32131a9c 7060
546b63fb
RK
7061static void
7062inc_for_reload (reloadreg, value, inc_amount)
32131a9c
RK
7063 rtx reloadreg;
7064 rtx value;
7065 int inc_amount;
32131a9c
RK
7066{
7067 /* REG or MEM to be copied and incremented. */
7068 rtx incloc = XEXP (value, 0);
7069 /* Nonzero if increment after copying. */
7070 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
546b63fb 7071 rtx last;
0009eff2
RK
7072 rtx inc;
7073 rtx add_insn;
7074 int code;
32131a9c
RK
7075
7076 /* No hard register is equivalent to this register after
7077 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7078 we could inc/dec that register as well (maybe even using it for
7079 the source), but I'm not sure it's worth worrying about. */
7080 if (GET_CODE (incloc) == REG)
7081 reg_last_reload_reg[REGNO (incloc)] = 0;
7082
7083 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7084 inc_amount = - inc_amount;
7085
fb3821f7 7086 inc = GEN_INT (inc_amount);
0009eff2
RK
7087
7088 /* If this is post-increment, first copy the location to the reload reg. */
7089 if (post)
546b63fb 7090 emit_insn (gen_move_insn (reloadreg, incloc));
0009eff2
RK
7091
7092 /* See if we can directly increment INCLOC. Use a method similar to that
5e03c156 7093 in gen_reload. */
0009eff2 7094
546b63fb
RK
7095 last = get_last_insn ();
7096 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
7097 gen_rtx (PLUS, GET_MODE (incloc),
7098 incloc, inc)));
0009eff2
RK
7099
7100 code = recog_memoized (add_insn);
7101 if (code >= 0)
32131a9c 7102 {
0009eff2
RK
7103 insn_extract (add_insn);
7104 if (constrain_operands (code, 1))
32131a9c 7105 {
0009eff2
RK
7106 /* If this is a pre-increment and we have incremented the value
7107 where it lives, copy the incremented value to RELOADREG to
7108 be used as an address. */
7109
7110 if (! post)
546b63fb
RK
7111 emit_insn (gen_move_insn (reloadreg, incloc));
7112
7113 return;
32131a9c
RK
7114 }
7115 }
0009eff2 7116
546b63fb 7117 delete_insns_since (last);
0009eff2
RK
7118
7119 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7120 The way we do this depends on whether this is pre- or post-increment.
7121 For pre-increment, copy INCLOC to the reload register, increment it
7122 there, then save back. */
7123
7124 if (! post)
7125 {
546b63fb
RK
7126 emit_insn (gen_move_insn (reloadreg, incloc));
7127 emit_insn (gen_add2_insn (reloadreg, inc));
7128 emit_insn (gen_move_insn (incloc, reloadreg));
0009eff2 7129 }
32131a9c
RK
7130 else
7131 {
0009eff2
RK
7132 /* Postincrement.
7133 Because this might be a jump insn or a compare, and because RELOADREG
7134 may not be available after the insn in an input reload, we must do
7135 the incrementation before the insn being reloaded for.
7136
7137 We have already copied INCLOC to RELOADREG. Increment the copy in
7138 RELOADREG, save that back, then decrement RELOADREG so it has
7139 the original value. */
7140
546b63fb
RK
7141 emit_insn (gen_add2_insn (reloadreg, inc));
7142 emit_insn (gen_move_insn (incloc, reloadreg));
7143 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
32131a9c 7144 }
0009eff2 7145
546b63fb 7146 return;
32131a9c
RK
7147}
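/* Illustrative sequence for the post-increment fallback above, assuming a
   hypothetical (post_inc:SI (reg:SI 8)) with INC_AMOUNT 4 that cannot be
   incremented in place: the code emits, in order,

	copy INCLOC into RELOADREG	(done before the direct attempt)
	add 4 to RELOADREG
	copy RELOADREG back into INCLOC
	add -4 to RELOADREG

   so that INCLOC holds the incremented value while RELOADREG still holds
   the pre-increment value that the insn being reloaded expects.  */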
7148\f
7149/* Return 1 if we are certain that the constraint-string STRING allows
7150 the hard register REG. Return 0 if we can't be sure of this. */
7151
7152static int
7153constraint_accepts_reg_p (string, reg)
7154 char *string;
7155 rtx reg;
7156{
7157 int value = 0;
7158 int regno = true_regnum (reg);
7159 int c;
7160
7161 /* Initialize for first alternative. */
7162 value = 0;
7163 /* Check that each alternative contains `g' or `r'. */
7164 while (1)
7165 switch (c = *string++)
7166 {
7167 case 0:
7168 /* If an alternative lacks `g' or `r', we lose. */
7169 return value;
7170 case ',':
7171 /* If an alternative lacks `g' or `r', we lose. */
7172 if (value == 0)
7173 return 0;
7174 /* Initialize for next alternative. */
7175 value = 0;
7176 break;
7177 case 'g':
7178 case 'r':
7179 /* Any general reg wins for this alternative. */
7180 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7181 value = 1;
7182 break;
7183 default:
7184 /* Any reg in specified class wins for this alternative. */
7185 {
0009eff2 7186 enum reg_class class = REG_CLASS_FROM_LETTER (c);
32131a9c 7187
0009eff2 7188 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
32131a9c
RK
7189 value = 1;
7190 }
7191 }
7192}
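/* Illustrative examples: for a hard register known to be in GENERAL_REGS,
   constraint_accepts_reg_p ("r,g", reg) returns 1, since every alternative
   accepts a general register, while constraint_accepts_reg_p ("r,m", reg)
   returns 0, since the second alternative accepts only memory and so we
   cannot be certain the register is allowed.  */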
7193\f
d445b551
RK
7194/* Return the number of places FIND appears within X, but don't count
7195 an occurrence if some SET_DEST is FIND. */
32131a9c
RK
7196
7197static int
7198count_occurrences (x, find)
7199 register rtx x, find;
7200{
7201 register int i, j;
7202 register enum rtx_code code;
7203 register char *format_ptr;
7204 int count;
7205
7206 if (x == find)
7207 return 1;
7208 if (x == 0)
7209 return 0;
7210
7211 code = GET_CODE (x);
7212
7213 switch (code)
7214 {
7215 case REG:
7216 case QUEUED:
7217 case CONST_INT:
7218 case CONST_DOUBLE:
7219 case SYMBOL_REF:
7220 case CODE_LABEL:
7221 case PC:
7222 case CC0:
7223 return 0;
d445b551
RK
7224
7225 case SET:
7226 if (SET_DEST (x) == find)
7227 return count_occurrences (SET_SRC (x), find);
7228 break;
32131a9c
RK
7229 }
7230
7231 format_ptr = GET_RTX_FORMAT (code);
7232 count = 0;
7233
7234 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7235 {
7236 switch (*format_ptr++)
7237 {
7238 case 'e':
7239 count += count_occurrences (XEXP (x, i), find);
7240 break;
7241
7242 case 'E':
7243 if (XVEC (x, i) != NULL)
7244 {
7245 for (j = 0; j < XVECLEN (x, i); j++)
7246 count += count_occurrences (XVECEXP (x, i, j), find);
7247 }
7248 break;
7249 }
7250 }
7251 return count;
7252}
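/* Illustrative example: with FIND being the (shared) rtx for (reg:SI 5),
   calling count_occurrences on
   (set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 1)))
   returns 1: the use inside the PLUS is counted, but the occurrence as the
   SET_DEST is deliberately not counted.  */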