gcc/reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally record the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. In some
50 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
51 for certain reloads. Then that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
87
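/* Editor's sketch (not part of the original source, never compiled): the
   calling protocol described above, written out as a hypothetical driver.
   find_reloads, subst_reloads, n_reloads and rld[] are the real entities
   declared in this file and reload.h; choose_hard_reg_for_reload is a
   made-up placeholder for the register-allocation step that reload1.c
   performs when it fills in rld[i].reg_rtx.  */
#if 0
static void
reload_one_insn_sketch (rtx insn, int replace, int ind_levels)
{
  int i;

  /* 1. Record the reloads this insn needs and, if REPLACE, the locations
     where the reloaded values appear.  */
  find_reloads (insn, replace, ind_levels, 0, NULL);

  /* 2. Choose a hard register for every reload that find_reloads did not
     already resolve; load insns must be emitted before INSN and any store
     insns after it.  */
  for (i = 0; i < n_reloads; i++)
    if (rld[i].reg_rtx == 0)
      rld[i].reg_rtx = choose_hard_reg_for_reload (i); /* hypothetical */

  /* 3. Substitute the chosen reload registers into the recorded
     locations.  */
  subst_reloads (insn);
}
#endif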
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "reload.h"
104 #include "regs.h"
105 #include "addresses.h"
106 #include "hard-reg-set.h"
107 #include "flags.h"
108 #include "real.h"
109 #include "output.h"
110 #include "function.h"
111 #include "toplev.h"
112 #include "params.h"
113 #include "target.h"
114 #include "df.h"
115 #include "ira.h"
116
117 /* True if X is a constant that can be forced into the constant pool. */
118 #define CONST_POOL_OK_P(X) \
119 (CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (X))
122
123 /* True if C is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
125 #define SMALL_REGISTER_CLASS_P(C) \
126 (reg_class_size [(C)] == 1 \
127 || (reg_class_size [(C)] >= 1 && CLASS_LIKELY_SPILLED_P (C)))
128
129 \f
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140 int reload_n_operands;
141
142 /* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
151
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
154 {
155 rtx *where; /* Location to store in */
156 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
157 a SUBREG; 0 otherwise. */
158 int what; /* which reload this is for */
159 enum machine_mode mode; /* mode it must have */
160 };
161
162 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
163
164 /* Number of replacements currently recorded. */
165 static int n_replacements;
166
167 /* Used to track what is modified by an operand. */
168 struct decomposition
169 {
170 int reg_flag; /* Nonzero if referencing a register. */
171 int safe; /* Nonzero if this can't conflict with anything. */
172 rtx base; /* Base address for MEM. */
173 HOST_WIDE_INT start; /* Starting offset or register number. */
174 HOST_WIDE_INT end; /* Ending offset or register number. */
175 };
176
177 #ifdef SECONDARY_MEMORY_NEEDED
178
179 /* Save MEMs needed to copy from one class of registers to another. One MEM
180 is used per mode, but normally only one or two modes are ever used.
181
182 We keep two versions, before and after register elimination. The one
183 after register elimination is recorded separately for each operand. This
184 is done in case the address is not valid, to be sure that each operand
185 is reloaded separately. */
186
187 static rtx secondary_memlocs[NUM_MACHINE_MODES];
188 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
189 static int secondary_memlocs_elim_used = 0;
190 #endif
191
192 /* The instruction we are doing reloads for;
193 so we can test whether a register dies in it. */
194 static rtx this_insn;
195
196 /* Nonzero if this instruction is a user-specified asm with operands. */
197 static int this_insn_is_asm;
198
199 /* If hard_regs_live_known is nonzero,
200 we can tell which hard regs are currently live,
201 at least enough to succeed in choosing dummy reloads. */
202 static int hard_regs_live_known;
203
204 /* Indexed by hard reg number,
205 element is nonnegative if hard reg has been spilled.
206 This vector is passed to `find_reloads' as an argument
207 and is not changed here. */
208 static short *static_reload_reg_p;
209
210 /* Set to 1 in subst_reg_equivs if it changes anything. */
211 static int subst_reg_equivs_changed;
212
213 /* On return from push_reload, holds the reload-number for the OUT
214 operand, which can be different from that for the input operand. */
215 static int output_reloadnum;
216
217 /* Nonzero if the rtx's X and Y are considered the same for reloading. */
218 #define MATCHES(x, y) \
219 (x == y || (x != 0 && (REG_P (x) \
220 ? REG_P (y) && REGNO (x) == REGNO (y) \
221 : rtx_equal_p (x, y) && ! side_effects_p (x))))
222
223 /* Indicates if two reload purposes are for similar enough things that we
224 can merge their reloads. */
225 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
226 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
227 || ((when1) == (when2) && (op1) == (op2)) \
228 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
229 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
230 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
231 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
232 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
233
234 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
235 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
236 ((when1) != (when2) \
237 || ! ((op1) == (op2) \
238 || (when1) == RELOAD_FOR_INPUT \
239 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
240 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
241
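/* For example (editor's note): two RELOAD_FOR_INPUT reloads are mergeable
   even when they are for different operands, whereas two
   RELOAD_FOR_INPUT_ADDRESS reloads merge only when they are for the same
   operand; and merging anything with a RELOAD_OTHER reload leaves the
   result classified as RELOAD_OTHER.  */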
242 /* If we are going to reload an address, compute the reload type to
243 use. */
244 #define ADDR_TYPE(type) \
245 ((type) == RELOAD_FOR_INPUT_ADDRESS \
246 ? RELOAD_FOR_INPADDR_ADDRESS \
247 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
248 ? RELOAD_FOR_OUTADDR_ADDRESS \
249 : (type)))
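/* For example (editor's note): ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is
   RELOAD_FOR_INPADDR_ADDRESS, so a reload needed for the address used by
   an input-address reload is pushed one level further; every other type
   maps to itself.  */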
250
251 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
252 enum machine_mode, enum reload_type,
253 enum insn_code *, secondary_reload_info *);
254 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
255 int, unsigned int);
256 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
257 static void push_replacement (rtx *, int, enum machine_mode);
258 static void dup_replacements (rtx *, rtx *);
259 static void combine_reloads (void);
260 static int find_reusable_reload (rtx *, rtx, enum reg_class,
261 enum reload_type, int, int);
262 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
263 enum machine_mode, enum reg_class, int, int);
264 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
265 static struct decomposition decompose (rtx);
266 static int immune_p (rtx, rtx, struct decomposition);
267 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
268 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
269 int *);
270 static rtx make_memloc (rtx, int);
271 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
272 addr_space_t, rtx *);
273 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
274 int, enum reload_type, int, rtx);
275 static rtx subst_reg_equivs (rtx, rtx);
276 static rtx subst_indexed_address (rtx);
277 static void update_auto_inc_notes (rtx, int, int);
278 static int find_reloads_address_1 (enum machine_mode, rtx, int,
279 enum rtx_code, enum rtx_code, rtx *,
280 int, enum reload_type,int, rtx);
281 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
282 enum machine_mode, int,
283 enum reload_type, int);
284 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
285 int, rtx);
286 static void copy_replacements_1 (rtx *, rtx *, int);
287 static int find_inc_amount (rtx, rtx);
288 static int refers_to_mem_for_reload_p (rtx);
289 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
290 rtx, rtx *);
291
292 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
293 list yet. */
294
295 static void
296 push_reg_equiv_alt_mem (int regno, rtx mem)
297 {
298 rtx it;
299
300 for (it = reg_equiv_alt_mem_list [regno]; it; it = XEXP (it, 1))
301 if (rtx_equal_p (XEXP (it, 0), mem))
302 return;
303
304 reg_equiv_alt_mem_list [regno]
305 = alloc_EXPR_LIST (REG_EQUIV, mem,
306 reg_equiv_alt_mem_list [regno]);
307 }
308 \f
309 /* Determine if any secondary reloads are needed for loading (if IN_P is
310 nonzero) or storing (if IN_P is zero) X to or from a reload register of
311 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
312 are needed, push them.
313
314 Return the reload number of the secondary reload we made, or -1 if
315 we didn't need one. *PICODE is set to the insn_code to use if we do
316 need a secondary reload. */
317
318 static int
319 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
320 enum reg_class reload_class,
321 enum machine_mode reload_mode, enum reload_type type,
322 enum insn_code *picode, secondary_reload_info *prev_sri)
323 {
324 enum reg_class rclass = NO_REGS;
325 enum reg_class scratch_class;
326 enum machine_mode mode = reload_mode;
327 enum insn_code icode = CODE_FOR_nothing;
328 enum insn_code t_icode = CODE_FOR_nothing;
329 enum reload_type secondary_type;
330 int s_reload, t_reload = -1;
331 const char *scratch_constraint;
332 char letter;
333 secondary_reload_info sri;
334
335 if (type == RELOAD_FOR_INPUT_ADDRESS
336 || type == RELOAD_FOR_OUTPUT_ADDRESS
337 || type == RELOAD_FOR_INPADDR_ADDRESS
338 || type == RELOAD_FOR_OUTADDR_ADDRESS)
339 secondary_type = type;
340 else
341 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
342
343 *picode = CODE_FOR_nothing;
344
345 /* If X is a paradoxical SUBREG, use the inner value to determine both the
346 mode and object being reloaded. */
347 if (GET_CODE (x) == SUBREG
348 && (GET_MODE_SIZE (GET_MODE (x))
349 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
350 {
351 x = SUBREG_REG (x);
352 reload_mode = GET_MODE (x);
353 }
354
355 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
356 is still a pseudo-register by now, it *must* have an equivalent MEM
357 but we don't want to assume that), use that equivalent when seeing if
358 a secondary reload is needed since whether or not a reload is needed
359 might be sensitive to the form of the MEM. */
360
361 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
362 && reg_equiv_mem[REGNO (x)] != 0)
363 x = reg_equiv_mem[REGNO (x)];
364
365 sri.icode = CODE_FOR_nothing;
366 sri.prev_sri = prev_sri;
367 rclass = targetm.secondary_reload (in_p, x, reload_class, reload_mode, &sri);
368 icode = (enum insn_code) sri.icode;
369
370 /* If we don't need any secondary registers, done. */
371 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
372 return -1;
373
374 if (rclass != NO_REGS)
375 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
376 reload_mode, type, &t_icode, &sri);
377
378 /* If we will be using an insn, the secondary reload is for a
379 scratch register. */
380
381 if (icode != CODE_FOR_nothing)
382 {
383 /* If IN_P is nonzero, the reload register will be the output in
384 operand 0. If IN_P is zero, the reload register will be the input
385 in operand 1. Outputs should have an initial "=", which we must
386 skip. */
387
388 /* ??? It would be useful to be able to handle only two, or more than
389 three, operands, but for now we can only handle the case of having
390 exactly three: output, input and one temp/scratch. */
391 gcc_assert (insn_data[(int) icode].n_operands == 3);
392
393 /* ??? We currently have no way to represent a reload that needs
394 an icode to reload from an intermediate tertiary reload register.
395 We should probably have a new field in struct reload to tag a
396 chain of scratch operand reloads onto. */
397 gcc_assert (rclass == NO_REGS);
398
399 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
400 gcc_assert (*scratch_constraint == '=');
401 scratch_constraint++;
402 if (*scratch_constraint == '&')
403 scratch_constraint++;
404 letter = *scratch_constraint;
405 scratch_class = (letter == 'r' ? GENERAL_REGS
406 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
407 scratch_constraint));
408
409 rclass = scratch_class;
410 mode = insn_data[(int) icode].operand[2].mode;
411 }
412
413 /* This case isn't valid, so fail. Reload is allowed to use the same
414 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
415 in the case of a secondary register, we actually need two different
416 registers for correct code. We fail here to prevent the possibility of
417 silently generating incorrect code later.
418
419 The convention is that secondary input reloads are valid only if the
420 secondary_class is different from class. If you have such a case, you
421 cannot use secondary reloads; you must work around the problem some
422 other way.
423
424 Allow this when a reload_in/out pattern is being used. I.e. assume
425 that the generated code handles this case. */
426
427 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
428 || t_icode != CODE_FOR_nothing);
429
430 /* See if we can reuse an existing secondary reload. */
431 for (s_reload = 0; s_reload < n_reloads; s_reload++)
432 if (rld[s_reload].secondary_p
433 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
434 || reg_class_subset_p (rld[s_reload].rclass, rclass))
435 && ((in_p && rld[s_reload].inmode == mode)
436 || (! in_p && rld[s_reload].outmode == mode))
437 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
438 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
439 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
440 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
441 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
442 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
443 opnum, rld[s_reload].opnum))
444 {
445 if (in_p)
446 rld[s_reload].inmode = mode;
447 if (! in_p)
448 rld[s_reload].outmode = mode;
449
450 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
451 rld[s_reload].rclass = rclass;
452
453 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
454 rld[s_reload].optional &= optional;
455 rld[s_reload].secondary_p = 1;
456 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
457 opnum, rld[s_reload].opnum))
458 rld[s_reload].when_needed = RELOAD_OTHER;
459
460 break;
461 }
462
463 if (s_reload == n_reloads)
464 {
465 #ifdef SECONDARY_MEMORY_NEEDED
466 /* If we need a memory location to copy between the two reload regs,
467 set it up now. Note that we do the input case before making
468 the reload and the output case after. This is due to the
469 way reloads are output. */
470
471 if (in_p && icode == CODE_FOR_nothing
472 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
473 {
474 get_secondary_mem (x, reload_mode, opnum, type);
475
476 /* We may have just added new reloads. Make sure we add
477 the new reload at the end. */
478 s_reload = n_reloads;
479 }
480 #endif
481
482 /* We need to make a new secondary reload for this register class. */
483 rld[s_reload].in = rld[s_reload].out = 0;
484 rld[s_reload].rclass = rclass;
485
486 rld[s_reload].inmode = in_p ? mode : VOIDmode;
487 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
488 rld[s_reload].reg_rtx = 0;
489 rld[s_reload].optional = optional;
490 rld[s_reload].inc = 0;
491 /* Maybe we could combine these, but it seems too tricky. */
492 rld[s_reload].nocombine = 1;
493 rld[s_reload].in_reg = 0;
494 rld[s_reload].out_reg = 0;
495 rld[s_reload].opnum = opnum;
496 rld[s_reload].when_needed = secondary_type;
497 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
498 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
499 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
500 rld[s_reload].secondary_out_icode
501 = ! in_p ? t_icode : CODE_FOR_nothing;
502 rld[s_reload].secondary_p = 1;
503
504 n_reloads++;
505
506 #ifdef SECONDARY_MEMORY_NEEDED
507 if (! in_p && icode == CODE_FOR_nothing
508 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
509 get_secondary_mem (x, mode, opnum, type);
510 #endif
511 }
512
513 *picode = icode;
514 return s_reload;
515 }
516
517 /* If a secondary reload is needed, return its class. If both an intermediate
518 register and a scratch register are needed, we return the class of the
519 intermediate register. */
520 enum reg_class
521 secondary_reload_class (bool in_p, enum reg_class rclass,
522 enum machine_mode mode, rtx x)
523 {
524 enum insn_code icode;
525 secondary_reload_info sri;
526
527 sri.icode = CODE_FOR_nothing;
528 sri.prev_sri = NULL;
529 rclass = targetm.secondary_reload (in_p, x, rclass, mode, &sri);
530 icode = (enum insn_code) sri.icode;
531
532 /* If there are no secondary reloads at all, we return NO_REGS.
533 If an intermediate register is needed, we return its class. */
534 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
535 return rclass;
536
537 /* No intermediate register is needed, but we have a special reload
538 pattern, which we assume for now needs a scratch register. */
539 return scratch_reload_class (icode);
540 }
541
542 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
543 three operands, verify that operand 2 is an output operand, and return
544 its register class.
545 ??? We'd like to be able to handle any pattern with at least 2 operands,
546 plus zero or more scratch registers, but that needs more infrastructure. */
547 enum reg_class
548 scratch_reload_class (enum insn_code icode)
549 {
550 const char *scratch_constraint;
551 char scratch_letter;
552 enum reg_class rclass;
553
554 gcc_assert (insn_data[(int) icode].n_operands == 3);
555 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
556 gcc_assert (*scratch_constraint == '=');
557 scratch_constraint++;
558 if (*scratch_constraint == '&')
559 scratch_constraint++;
560 scratch_letter = *scratch_constraint;
561 if (scratch_letter == 'r')
562 return GENERAL_REGS;
563 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
564 scratch_constraint);
565 gcc_assert (rclass != NO_REGS);
566 return rclass;
567 }
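/* For example (editor's note): for a reload pattern whose operand 2 has
   the constraint "=&r", the code above skips the '=' and '&' and maps the
   'r' straight to GENERAL_REGS; any other class letter goes through
   REG_CLASS_FROM_CONSTRAINT.  */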
568 \f
569 #ifdef SECONDARY_MEMORY_NEEDED
570
571 /* Return a memory location that will be used to copy X in mode MODE.
572 If we haven't already made a location for this mode in this insn,
573 call find_reloads_address on the location being returned. */
574
575 rtx
576 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
577 int opnum, enum reload_type type)
578 {
579 rtx loc;
580 int mem_valid;
581
582 /* By default, if MODE is narrower than a word, widen it to a word.
583 This is required because most machines that require these memory
584 locations do not support short loads and stores from all registers
585 (e.g., FP registers). */
586
587 #ifdef SECONDARY_MEMORY_NEEDED_MODE
588 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
589 #else
590 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
591 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
592 #endif
593
594 /* If we already have made a MEM for this operand in MODE, return it. */
595 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
596 return secondary_memlocs_elim[(int) mode][opnum];
597
598 /* If this is the first time we've tried to get a MEM for this mode,
599 allocate a new one. `something_changed' in reload will get set
600 by noticing that the frame size has changed. */
601
602 if (secondary_memlocs[(int) mode] == 0)
603 {
604 #ifdef SECONDARY_MEMORY_NEEDED_RTX
605 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
606 #else
607 secondary_memlocs[(int) mode]
608 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
609 #endif
610 }
611
612 /* Get a version of the address doing any eliminations needed. If that
613 didn't give us a new MEM, make a new one if it isn't valid. */
614
615 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
616 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
617 MEM_ADDR_SPACE (loc));
618
619 if (! mem_valid && loc == secondary_memlocs[(int) mode])
620 loc = copy_rtx (loc);
621
622 /* The only time the call below will do anything is if the stack
623 offset is too large. In that case IND_LEVELS doesn't matter, so we
624 can just pass a zero. Adjust the type to be the address of the
625 corresponding object. If the address was valid, save the eliminated
626 address. If it wasn't valid, we need to make a reload each time, so
627 don't save it. */
628
629 if (! mem_valid)
630 {
631 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
632 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
633 : RELOAD_OTHER);
634
635 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
636 opnum, type, 0, 0);
637 }
638
639 secondary_memlocs_elim[(int) mode][opnum] = loc;
640 if (secondary_memlocs_elim_used <= (int)mode)
641 secondary_memlocs_elim_used = (int)mode + 1;
642 return loc;
643 }
644
645 /* Clear any secondary memory locations we've made. */
646
647 void
648 clear_secondary_mem (void)
649 {
650 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
651 }
652 #endif /* SECONDARY_MEMORY_NEEDED */
653 \f
654
655 /* Find the largest class which has at least one register valid in
656 mode INNER, and which for every such register, that register number
657 plus N is also valid in OUTER (if in range) and is cheap to move
658 into DEST_REGNO. Such a class must exist. */
659
660 static enum reg_class
661 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
662 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
663 unsigned int dest_regno ATTRIBUTE_UNUSED)
664 {
665 int best_cost = -1;
666 int rclass;
667 int regno;
668 enum reg_class best_class = NO_REGS;
669 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
670 unsigned int best_size = 0;
671 int cost;
672
673 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
674 {
675 int bad = 0;
676 int good = 0;
677 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
678 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
679 {
680 if (HARD_REGNO_MODE_OK (regno, inner))
681 {
682 good = 1;
683 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
684 || ! HARD_REGNO_MODE_OK (regno + n, outer))
685 bad = 1;
686 }
687 }
688
689 if (bad || !good)
690 continue;
691 cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass, dest_class);
692
693 if ((reg_class_size[rclass] > best_size
694 && (best_cost < 0 || best_cost >= cost))
695 || best_cost > cost)
696 {
697 best_class = (enum reg_class) rclass;
698 best_size = reg_class_size[rclass];
699 best_cost = REGISTER_MOVE_COST (outer, (enum reg_class) rclass,
700 dest_class);
701 }
702 }
703
704 gcc_assert (best_size != 0);
705
706 return best_class;
707 }
708 \f
709 /* Return the number of a previously made reload that can be combined with
710 a new one, or n_reloads if none of the existing reloads can be used.
711 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
712 push_reload, they determine the kind of the new reload that we try to
713 combine. P_IN points to the corresponding value of IN, which can be
714 modified by this function.
715 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
716
717 static int
718 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
719 enum reload_type type, int opnum, int dont_share)
720 {
721 rtx in = *p_in;
722 int i;
723 /* We can't merge two reloads if the output of either one is
724 earlyclobbered. */
725
726 if (earlyclobber_operand_p (out))
727 return n_reloads;
728
729 /* We can use an existing reload if the class is right
730 and at least one of IN and OUT is a match
731 and the other is at worst neutral.
732 (A zero compared against anything is neutral.)
733
734 If SMALL_REGISTER_CLASSES, don't use existing reloads unless they are
735 for the same thing since that can cause us to need more reload registers
736 than we otherwise would. */
737
738 for (i = 0; i < n_reloads; i++)
739 if ((reg_class_subset_p (rclass, rld[i].rclass)
740 || reg_class_subset_p (rld[i].rclass, rclass))
741 /* If the existing reload has a register, it must fit our class. */
742 && (rld[i].reg_rtx == 0
743 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
744 true_regnum (rld[i].reg_rtx)))
745 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
746 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
747 || (out != 0 && MATCHES (rld[i].out, out)
748 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
749 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
750 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
751 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
752 return i;
753
754 /* Reloading a plain reg for input can match a reload to postincrement
755 that reg, since the postincrement's value is the right value.
756 Likewise, it can match a preincrement reload, since we regard
757 the preincrementation as happening before any ref in this insn
758 to that register. */
759 for (i = 0; i < n_reloads; i++)
760 if ((reg_class_subset_p (rclass, rld[i].rclass)
761 || reg_class_subset_p (rld[i].rclass, rclass))
762 /* If the existing reload has a register, it must fit our
763 class. */
764 && (rld[i].reg_rtx == 0
765 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
766 true_regnum (rld[i].reg_rtx)))
767 && out == 0 && rld[i].out == 0 && rld[i].in != 0
768 && ((REG_P (in)
769 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
770 && MATCHES (XEXP (rld[i].in, 0), in))
771 || (REG_P (rld[i].in)
772 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
773 && MATCHES (XEXP (in, 0), rld[i].in)))
774 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
775 && (SMALL_REGISTER_CLASS_P (rclass) || SMALL_REGISTER_CLASSES)
776 && MERGABLE_RELOADS (type, rld[i].when_needed,
777 opnum, rld[i].opnum))
778 {
779 /* Make sure reload_in ultimately has the increment,
780 not the plain register. */
781 if (REG_P (in))
782 *p_in = rld[i].in;
783 return i;
784 }
785 return n_reloads;
786 }
787
788 /* Return nonzero if X is a SUBREG which will require reloading of its
789 SUBREG_REG expression. */
790
791 static int
792 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
793 {
794 rtx inner;
795
796 /* Only SUBREGs are problematical. */
797 if (GET_CODE (x) != SUBREG)
798 return 0;
799
800 inner = SUBREG_REG (x);
801
802 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
803 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
804 return 1;
805
806 /* If INNER is not a hard register, then INNER will not need to
807 be reloaded. */
808 if (!REG_P (inner)
809 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
810 return 0;
811
812 /* If INNER is not ok for MODE, then INNER will need reloading. */
813 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
814 return 1;
815
816 /* For an output SUBREG, if the outer part is a word or smaller, INNER
817 is larger than a word, and the number of regs for INNER is not the
818 same as the number of words in INNER, then INNER will need reloading. */
819 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
820 && output
821 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
822 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
823 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
824 }
825
826 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
827 requiring an extra reload register. The caller has already found that
828 IN contains some reference to REGNO, so check that we can produce the
829 new value in a single step. E.g. if we have
830 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
831 instruction that adds one to a register, this should succeed.
832 However, if we have something like
833 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
834 needs to be loaded into a register first, we need a separate reload
835 register.
836 Such PLUS reloads are generated by find_reloads_address_part.
837 The out-of-range PLUS expressions are usually introduced in the instruction
838 patterns by register elimination and substituting pseudos without a home
839 by their function-invariant equivalences. */
840 static int
841 can_reload_into (rtx in, int regno, enum machine_mode mode)
842 {
843 rtx dst, test_insn;
844 int r = 0;
845 struct recog_data save_recog_data;
846
847 /* For matching constraints, we often get notional input reloads where
848 we want to use the original register as the reload register. I.e.
849 technically this is a non-optional input-output reload, but IN is
850 already a valid register, and has been chosen as the reload register.
851 Speed this up, since it trivially works. */
852 if (REG_P (in))
853 return 1;
854
855 /* To test MEMs properly, we'd have to take into account all the reloads
856 that are already scheduled, which can become quite complicated.
857 And since we've already handled address reloads for this MEM, it
858 should always succeed anyway. */
859 if (MEM_P (in))
860 return 1;
861
862 /* If we can make a simple SET insn that does the job, everything should
863 be fine. */
864 dst = gen_rtx_REG (mode, regno);
865 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
866 save_recog_data = recog_data;
867 if (recog_memoized (test_insn) >= 0)
868 {
869 extract_insn (test_insn);
870 r = constrain_operands (1);
871 }
872 recog_data = save_recog_data;
873 return r;
874 }
875
876 /* Record one reload that needs to be performed.
877 IN is an rtx saying where the data are to be found before this instruction.
878 OUT says where they must be stored after the instruction.
879 (IN is zero for data not read, and OUT is zero for data not written.)
880 INLOC and OUTLOC point to the places in the instructions where
881 IN and OUT were found.
882 If IN and OUT are both nonzero, it means the same register must be used
883 to reload both IN and OUT.
884
885 RCLASS is a register class required for the reloaded data.
886 INMODE is the machine mode that the instruction requires
887 for the reg that replaces IN and OUTMODE is likewise for OUT.
888
889 If IN is zero, then OUT's location and mode should be passed as
890 INLOC and INMODE.
891
892 STRICT_LOW is nonzero if there is a containing STRICT_LOW_PART rtx.
893
894 OPTIONAL nonzero means this reload does not need to be performed:
895 it can be discarded if that is more convenient.
896
897 OPNUM and TYPE say what the purpose of this reload is.
898
899 The return value is the reload-number for this reload.
900
901 If both IN and OUT are nonzero, in some rare cases we might
902 want to make two separate reloads. (Actually we never do this now.)
903 Therefore, the reload-number for OUT is stored in
904 output_reloadnum when we return; the return value applies to IN.
905 Usually (presently always), when IN and OUT are nonzero,
906 the two reload-numbers are equal, but the caller should be careful to
907 distinguish them. */
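/* For example (editor's sketch of a hypothetical call, mirroring what
   find_reloads does for a simple input operand): forcing operand 1 of the
   current insn into a GENERAL_REGS register in SImode could be requested
   with

     push_reload (recog_data.operand[1], NULL_RTX,
                  recog_data.operand_loc[1], (rtx *) 0,
                  GENERAL_REGS, SImode, VOIDmode, 0, 0,
                  1, RELOAD_FOR_INPUT);

   i.e. no output reload, no STRICT_LOW_PART, non-optional, operand
   number 1.  */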
908
909 int
910 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
911 enum reg_class rclass, enum machine_mode inmode,
912 enum machine_mode outmode, int strict_low, int optional,
913 int opnum, enum reload_type type)
914 {
915 int i;
916 int dont_share = 0;
917 int dont_remove_subreg = 0;
918 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
919 int secondary_in_reload = -1, secondary_out_reload = -1;
920 enum insn_code secondary_in_icode = CODE_FOR_nothing;
921 enum insn_code secondary_out_icode = CODE_FOR_nothing;
922
923 /* INMODE and/or OUTMODE could be VOIDmode if no mode
924 has been specified for the operand. In that case,
925 use the operand's mode as the mode to reload. */
926 if (inmode == VOIDmode && in != 0)
927 inmode = GET_MODE (in);
928 if (outmode == VOIDmode && out != 0)
929 outmode = GET_MODE (out);
930
931 /* If find_reloads and friends have so far failed to replace a pseudo
932 with its reg_equiv_constant, then something went wrong
933 beforehand.
934 Note that it can't simply be done here if we missed it earlier
935 since the constant might need to be pushed into the literal pool
936 and the resulting memref would probably need further
937 reloading. */
938 if (in != 0 && REG_P (in))
939 {
940 int regno = REGNO (in);
941
942 gcc_assert (regno < FIRST_PSEUDO_REGISTER
943 || reg_renumber[regno] >= 0
944 || reg_equiv_constant[regno] == NULL_RTX);
945 }
946
947 /* reg_equiv_constant only contains constants which are obviously
948 not appropriate as a destination. So if we would need to replace
949 the destination pseudo with a constant we are in real
950 trouble. */
951 if (out != 0 && REG_P (out))
952 {
953 int regno = REGNO (out);
954
955 gcc_assert (regno < FIRST_PSEUDO_REGISTER
956 || reg_renumber[regno] >= 0
957 || reg_equiv_constant[regno] == NULL_RTX);
958 }
959
960 /* If we have a read-write operand with an address side-effect,
961 change either IN or OUT so the side-effect happens only once. */
962 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
963 switch (GET_CODE (XEXP (in, 0)))
964 {
965 case POST_INC: case POST_DEC: case POST_MODIFY:
966 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
967 break;
968
969 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
970 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
971 break;
972
973 default:
974 break;
975 }
976
977 /* If we are reloading a (SUBREG constant ...), really reload just the
978 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
979 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
980 a pseudo and hence will become a MEM) with M1 wider than M2 and the
981 register is a pseudo, also reload the inside expression.
982 For machines that extend byte loads, do this for any SUBREG of a pseudo
983 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
984 M2 is an integral mode that gets extended when loaded.
985 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
986 either M1 is not valid for R or M2 is wider than a word but we only
987 need one word to store an M2-sized quantity in R.
988 (However, if OUT is nonzero, we need to reload the reg *and*
989 the subreg, so do nothing here, and let following statement handle it.)
990
991 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
992 we can't handle it here because CONST_INT does not indicate a mode.
993
994 Similarly, we must reload the inside expression if we have a
995 STRICT_LOW_PART (presumably, in == out in this case).
996
997 Also reload the inner expression if it does not require a secondary
998 reload but the SUBREG does.
999
1000 Finally, reload the inner expression if it is a register that is in
1001 the class whose registers cannot be referenced in a different size
1002 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1003 cannot reload just the inside since we might end up with the wrong
1004 register class. But if it is inside a STRICT_LOW_PART, we have
1005 no choice, so we hope we do get the right register class there. */
1006
1007 if (in != 0 && GET_CODE (in) == SUBREG
1008 && (subreg_lowpart_p (in) || strict_low)
1009 #ifdef CANNOT_CHANGE_MODE_CLASS
1010 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1011 #endif
1012 && (CONSTANT_P (SUBREG_REG (in))
1013 || GET_CODE (SUBREG_REG (in)) == PLUS
1014 || strict_low
1015 || (((REG_P (SUBREG_REG (in))
1016 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1017 || MEM_P (SUBREG_REG (in)))
1018 && ((GET_MODE_SIZE (inmode)
1019 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1020 #ifdef LOAD_EXTEND_OP
1021 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1022 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1023 <= UNITS_PER_WORD)
1024 && (GET_MODE_SIZE (inmode)
1025 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1026 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1027 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1028 #endif
1029 #ifdef WORD_REGISTER_OPERATIONS
1030 || ((GET_MODE_SIZE (inmode)
1031 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1032 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1033 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1034 / UNITS_PER_WORD)))
1035 #endif
1036 ))
1037 || (REG_P (SUBREG_REG (in))
1038 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1039 /* The case where out is nonzero
1040 is handled differently in the following statement. */
1041 && (out == 0 || subreg_lowpart_p (in))
1042 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1043 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1044 > UNITS_PER_WORD)
1045 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1046 / UNITS_PER_WORD)
1047 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1048 [GET_MODE (SUBREG_REG (in))]))
1049 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1050 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1051 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1052 SUBREG_REG (in))
1053 == NO_REGS))
1054 #ifdef CANNOT_CHANGE_MODE_CLASS
1055 || (REG_P (SUBREG_REG (in))
1056 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1057 && REG_CANNOT_CHANGE_MODE_P
1058 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1059 #endif
1060 ))
1061 {
1062 in_subreg_loc = inloc;
1063 inloc = &SUBREG_REG (in);
1064 in = *inloc;
1065 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1066 if (MEM_P (in))
1067 /* This is supposed to happen only for paradoxical subregs made by
1068 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1069 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1070 #endif
1071 inmode = GET_MODE (in);
1072 }
1073
1074 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1075 either M1 is not valid for R or M2 is wider than a word but we only
1076 need one word to store an M2-sized quantity in R.
1077
1078 However, we must reload the inner reg *as well as* the subreg in
1079 that case. */
1080
1081 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1082 code above. This can happen if SUBREG_BYTE != 0. */
1083
1084 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1085 {
1086 enum reg_class in_class = rclass;
1087
1088 if (REG_P (SUBREG_REG (in)))
1089 in_class
1090 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1091 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1092 GET_MODE (SUBREG_REG (in)),
1093 SUBREG_BYTE (in),
1094 GET_MODE (in)),
1095 REGNO (SUBREG_REG (in)));
1096
1097 /* This relies on the fact that emit_reload_insns outputs the
1098 instructions for input reloads of type RELOAD_OTHER in the same
1099 order as the reloads. Thus if the outer reload is also of type
1100 RELOAD_OTHER, we are guaranteed that this inner reload will be
1101 output before the outer reload. */
1102 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1103 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1104 dont_remove_subreg = 1;
1105 }
1106
1107 /* Similarly for paradoxical and problematical SUBREGs on the output.
1108 Note that there is no reason we need worry about the previous value
1109 of SUBREG_REG (out); even if wider than out,
1110 storing in a subreg is entitled to clobber it all
1111 (except in the case of STRICT_LOW_PART,
1112 and in that case the constraint should label it input-output.) */
1113 if (out != 0 && GET_CODE (out) == SUBREG
1114 && (subreg_lowpart_p (out) || strict_low)
1115 #ifdef CANNOT_CHANGE_MODE_CLASS
1116 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1117 #endif
1118 && (CONSTANT_P (SUBREG_REG (out))
1119 || strict_low
1120 || (((REG_P (SUBREG_REG (out))
1121 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1122 || MEM_P (SUBREG_REG (out)))
1123 && ((GET_MODE_SIZE (outmode)
1124 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1125 #ifdef WORD_REGISTER_OPERATIONS
1126 || ((GET_MODE_SIZE (outmode)
1127 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1128 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1129 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1130 / UNITS_PER_WORD)))
1131 #endif
1132 ))
1133 || (REG_P (SUBREG_REG (out))
1134 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1135 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1136 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1137 > UNITS_PER_WORD)
1138 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1139 / UNITS_PER_WORD)
1140 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1141 [GET_MODE (SUBREG_REG (out))]))
1142 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1143 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1144 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1145 SUBREG_REG (out))
1146 == NO_REGS))
1147 #ifdef CANNOT_CHANGE_MODE_CLASS
1148 || (REG_P (SUBREG_REG (out))
1149 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1150 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1151 GET_MODE (SUBREG_REG (out)),
1152 outmode))
1153 #endif
1154 ))
1155 {
1156 out_subreg_loc = outloc;
1157 outloc = &SUBREG_REG (out);
1158 out = *outloc;
1159 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1160 gcc_assert (!MEM_P (out)
1161 || GET_MODE_SIZE (GET_MODE (out))
1162 <= GET_MODE_SIZE (outmode));
1163 #endif
1164 outmode = GET_MODE (out);
1165 }
1166
1167 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1168 either M1 is not valid for R or M2 is wider than a word but we only
1169 need one word to store an M2-sized quantity in R.
1170
1171 However, we must reload the inner reg *as well as* the subreg in
1172 that case. In this case, the inner reg is an in-out reload. */
1173
1174 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1175 {
1176 /* This relies on the fact that emit_reload_insns outputs the
1177 instructions for output reloads of type RELOAD_OTHER in reverse
1178 order of the reloads. Thus if the outer reload is also of type
1179 RELOAD_OTHER, we are guaranteed that this inner reload will be
1180 output after the outer reload. */
1181 dont_remove_subreg = 1;
1182 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1183 &SUBREG_REG (out),
1184 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1185 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1186 GET_MODE (SUBREG_REG (out)),
1187 SUBREG_BYTE (out),
1188 GET_MODE (out)),
1189 REGNO (SUBREG_REG (out))),
1190 VOIDmode, VOIDmode, 0, 0,
1191 opnum, RELOAD_OTHER);
1192 }
1193
1194 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1195 if (in != 0 && out != 0 && MEM_P (out)
1196 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1197 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1198 dont_share = 1;
1199
1200 /* If IN is a SUBREG of a hard register, make a new REG. This
1201 simplifies some of the cases below. */
1202
1203 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1204 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1205 && ! dont_remove_subreg)
1206 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1207
1208 /* Similarly for OUT. */
1209 if (out != 0 && GET_CODE (out) == SUBREG
1210 && REG_P (SUBREG_REG (out))
1211 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1212 && ! dont_remove_subreg)
1213 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1214
1215 /* Narrow down the class of register wanted if that is
1216 desirable on this machine for efficiency. */
1217 {
1218 enum reg_class preferred_class = rclass;
1219
1220 if (in != 0)
1221 preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1222
1223 /* Output reloads may need analogous treatment, different in detail. */
1224 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1225 if (out != 0)
1226 preferred_class = PREFERRED_OUTPUT_RELOAD_CLASS (out, preferred_class);
1227 #endif
1228
1229 /* Discard what the target said if we cannot do it. */
1230 if (preferred_class != NO_REGS
1231 || (optional && type == RELOAD_FOR_OUTPUT))
1232 rclass = preferred_class;
1233 }
1234
1235 /* Make sure we use a class that can handle the actual pseudo
1236 inside any subreg. For example, on the 386, QImode regs
1237 can appear within SImode subregs. Although GENERAL_REGS
1238 can handle SImode, QImode needs a smaller class. */
1239 #ifdef LIMIT_RELOAD_CLASS
1240 if (in_subreg_loc)
1241 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1242 else if (in != 0 && GET_CODE (in) == SUBREG)
1243 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1244
1245 if (out_subreg_loc)
1246 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1247 if (out != 0 && GET_CODE (out) == SUBREG)
1248 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1249 #endif
1250
1251 /* Verify that this class is at least possible for the mode that
1252 is specified. */
1253 if (this_insn_is_asm)
1254 {
1255 enum machine_mode mode;
1256 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1257 mode = inmode;
1258 else
1259 mode = outmode;
1260 if (mode == VOIDmode)
1261 {
1262 error_for_asm (this_insn, "cannot reload integer constant "
1263 "operand in %<asm%>");
1264 mode = word_mode;
1265 if (in != 0)
1266 inmode = word_mode;
1267 if (out != 0)
1268 outmode = word_mode;
1269 }
1270 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1271 if (HARD_REGNO_MODE_OK (i, mode)
1272 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1273 break;
1274 if (i == FIRST_PSEUDO_REGISTER)
1275 {
1276 error_for_asm (this_insn, "impossible register constraint "
1277 "in %<asm%>");
1278 /* Avoid further trouble with this insn. */
1279 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1280 /* We used to continue here setting class to ALL_REGS, but it triggers
1281 a sanity check on i386 for:
1282 void foo(long double d)
1283 {
1284 asm("" :: "a" (d));
1285 }
1286 Returning zero here ought to be safe as we take care in
1287 find_reloads to not process the reloads when instruction was
1288 replaced by USE. */
1289
1290 return 0;
1291 }
1292 }
1293
1294 /* Optional output reloads are always OK even if we have no register class,
1295 since the function of these reloads is only to have spill_reg_store etc.
1296 set, so that the storing insn can be deleted later. */
1297 gcc_assert (rclass != NO_REGS
1298 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1299
1300 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1301
1302 if (i == n_reloads)
1303 {
1304 /* See if we need a secondary reload register to move between CLASS
1305 and IN or CLASS and OUT. Get the icode and push any required reloads
1306 needed for each of them if so. */
1307
1308 if (in != 0)
1309 secondary_in_reload
1310 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1311 &secondary_in_icode, NULL);
1312 if (out != 0 && GET_CODE (out) != SCRATCH)
1313 secondary_out_reload
1314 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1315 type, &secondary_out_icode, NULL);
1316
1317 /* We found no existing reload suitable for re-use.
1318 So add an additional reload. */
1319
1320 #ifdef SECONDARY_MEMORY_NEEDED
1321 /* If a memory location is needed for the copy, make one. */
1322 if (in != 0
1323 && (REG_P (in)
1324 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1325 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1326 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1327 rclass, inmode))
1328 get_secondary_mem (in, inmode, opnum, type);
1329 #endif
1330
1331 i = n_reloads;
1332 rld[i].in = in;
1333 rld[i].out = out;
1334 rld[i].rclass = rclass;
1335 rld[i].inmode = inmode;
1336 rld[i].outmode = outmode;
1337 rld[i].reg_rtx = 0;
1338 rld[i].optional = optional;
1339 rld[i].inc = 0;
1340 rld[i].nocombine = 0;
1341 rld[i].in_reg = inloc ? *inloc : 0;
1342 rld[i].out_reg = outloc ? *outloc : 0;
1343 rld[i].opnum = opnum;
1344 rld[i].when_needed = type;
1345 rld[i].secondary_in_reload = secondary_in_reload;
1346 rld[i].secondary_out_reload = secondary_out_reload;
1347 rld[i].secondary_in_icode = secondary_in_icode;
1348 rld[i].secondary_out_icode = secondary_out_icode;
1349 rld[i].secondary_p = 0;
1350
1351 n_reloads++;
1352
1353 #ifdef SECONDARY_MEMORY_NEEDED
1354 if (out != 0
1355 && (REG_P (out)
1356 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1357 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1358 && SECONDARY_MEMORY_NEEDED (rclass,
1359 REGNO_REG_CLASS (reg_or_subregno (out)),
1360 outmode))
1361 get_secondary_mem (out, outmode, opnum, type);
1362 #endif
1363 }
1364 else
1365 {
1366 /* We are reusing an existing reload,
1367 but we may have additional information for it.
1368 For example, we may now have both IN and OUT
1369 while the old one may have just one of them. */
1370
1371 /* The modes can be different. If they are, we want to reload in
1372 the larger mode, so that the value is valid for both modes. */
1373 if (inmode != VOIDmode
1374 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1375 rld[i].inmode = inmode;
1376 if (outmode != VOIDmode
1377 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1378 rld[i].outmode = outmode;
1379 if (in != 0)
1380 {
1381 rtx in_reg = inloc ? *inloc : 0;
1382 /* If we merge reloads for two distinct rtl expressions that
1383 are identical in content, there might be duplicate address
1384 reloads. Remove the extra set now, so that if we later find
1385 that we can inherit this reload, we can get rid of the
1386 address reloads altogether.
1387
1388 Do not do this if both reloads are optional since the result
1389 would be an optional reload which could potentially leave
1390 unresolved address replacements.
1391
1392 It is not sufficient to call transfer_replacements since
1393 choose_reload_regs will remove the replacements for address
1394 reloads of inherited reloads which results in the same
1395 problem. */
1396 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1397 && ! (rld[i].optional && optional))
1398 {
1399 /* We must keep the address reload with the lower operand
1400 number alive. */
1401 if (opnum > rld[i].opnum)
1402 {
1403 remove_address_replacements (in);
1404 in = rld[i].in;
1405 in_reg = rld[i].in_reg;
1406 }
1407 else
1408 remove_address_replacements (rld[i].in);
1409 }
1410 /* When emitting reloads we don't only look at the in- and
1411 outmode, but also directly at the operands (in and out).
1412 So we can't simply overwrite them with whatever we have found
1413 for this (to-be-merged) reload, we have to "merge" that too.
1414 Reusing another reload already verified that we deal with the
1415 same operands, just possibly in different modes. So we
1416 overwrite the operands only when the new mode is larger.
1417 See also PR33613. */
1418 if (!rld[i].in
1419 || GET_MODE_SIZE (GET_MODE (in))
1420 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1421 rld[i].in = in;
1422 if (!rld[i].in_reg
1423 || (in_reg
1424 && GET_MODE_SIZE (GET_MODE (in_reg))
1425 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1426 rld[i].in_reg = in_reg;
1427 }
1428 if (out != 0)
1429 {
1430 if (!rld[i].out
1431 || (out
1432 && GET_MODE_SIZE (GET_MODE (out))
1433 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1434 rld[i].out = out;
1435 if (outloc
1436 && (!rld[i].out_reg
1437 || GET_MODE_SIZE (GET_MODE (*outloc))
1438 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1439 rld[i].out_reg = *outloc;
1440 }
1441 if (reg_class_subset_p (rclass, rld[i].rclass))
1442 rld[i].rclass = rclass;
1443 rld[i].optional &= optional;
1444 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1445 opnum, rld[i].opnum))
1446 rld[i].when_needed = RELOAD_OTHER;
1447 rld[i].opnum = MIN (rld[i].opnum, opnum);
1448 }
1449
1450 /* If the ostensible rtx being reloaded differs from the rtx found
1451 in the location to substitute, this reload is not safe to combine
1452 because we cannot reliably tell whether it appears in the insn. */
1453
1454 if (in != 0 && in != *inloc)
1455 rld[i].nocombine = 1;
1456
1457 #if 0
1458 /* This was replaced by changes in find_reloads_address_1 and the new
1459 function inc_for_reload, which go with a new meaning of reload_inc. */
1460
1461 /* If this is an IN/OUT reload in an insn that sets the CC,
1462 it must be for an autoincrement. It doesn't work to store
1463 the incremented value after the insn because that would clobber the CC.
1464 So we must fetch the value being reloaded from, increment it,
1465 store it back, then decrement again.
1466 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1467 {
1468 out = 0;
1469 rld[i].out = 0;
1470 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1471 /* If we did not find a nonzero amount-to-increment-by,
1472 that contradicts the belief that IN is being incremented
1473 in an address in this insn. */
1474 gcc_assert (rld[i].inc != 0);
1475 }
1476 #endif
1477
1478 /* If we will replace IN and OUT with the reload-reg,
1479 record where they are located so that substitution need
1480 not do a tree walk. */
1481
1482 if (replace_reloads)
1483 {
1484 if (inloc != 0)
1485 {
1486 struct replacement *r = &replacements[n_replacements++];
1487 r->what = i;
1488 r->subreg_loc = in_subreg_loc;
1489 r->where = inloc;
1490 r->mode = inmode;
1491 }
1492 if (outloc != 0 && outloc != inloc)
1493 {
1494 struct replacement *r = &replacements[n_replacements++];
1495 r->what = i;
1496 r->where = outloc;
1497 r->subreg_loc = out_subreg_loc;
1498 r->mode = outmode;
1499 }
1500 }
1501
1502 /* If this reload is just being introduced and it has both
1503 an incoming quantity and an outgoing quantity that are
1504 supposed to be made to match, see if either one of the two
1505 can serve as the place to reload into.
1506
1507 If one of them is acceptable, set rld[i].reg_rtx
1508 to that one. */
1509
1510 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1511 {
1512 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1513 inmode, outmode,
1514 rld[i].rclass, i,
1515 earlyclobber_operand_p (out));
1516
1517 /* If the outgoing register already contains the same value
1518 as the incoming one, we can dispense with loading it.
1519 The easiest way to tell the caller that is to give a phony
1520 value for the incoming operand (same as outgoing one). */
1521 if (rld[i].reg_rtx == out
1522 && (REG_P (in) || CONSTANT_P (in))
1523 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1524 static_reload_reg_p, i, inmode))
1525 rld[i].in = out;
1526 }
1527
1528 /* If this is an input reload and the operand contains a register that
1529 dies in this insn and is used nowhere else, see if it is the right class
1530 to be used for this reload. Use it if so. (This occurs most commonly
1531 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1532 this if it is also an output reload that mentions the register unless
1533 the output is a SUBREG that clobbers an entire register.
1534
1535 Note that the operand might be one of the spill regs, if it is a
1536 pseudo reg and we are in a block where spilling has not taken place.
1537 But if there is no spilling in this block, that is OK.
1538 An explicitly used hard reg cannot be a spill reg. */
1539
1540 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1541 {
1542 rtx note;
1543 int regno;
1544 enum machine_mode rel_mode = inmode;
1545
1546 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1547 rel_mode = outmode;
1548
1549 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1550 if (REG_NOTE_KIND (note) == REG_DEAD
1551 && REG_P (XEXP (note, 0))
1552 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1553 && reg_mentioned_p (XEXP (note, 0), in)
1554 /* Check that a former pseudo is valid; see find_dummy_reload. */
1555 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1556 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1557 ORIGINAL_REGNO (XEXP (note, 0)))
1558 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1559 && ! refers_to_regno_for_reload_p (regno,
1560 end_hard_regno (rel_mode,
1561 regno),
1562 PATTERN (this_insn), inloc)
1563 /* If this is also an output reload, IN cannot be used as
1564 the reload register if it is set in this insn unless IN
1565 is also OUT. */
1566 && (out == 0 || in == out
1567 || ! hard_reg_set_here_p (regno,
1568 end_hard_regno (rel_mode, regno),
1569 PATTERN (this_insn)))
1570 /* ??? Why is this code so different from the previous?
1571 Is there any simple coherent way to describe the two together?
1572 What's going on here? */
1573 && (in != out
1574 || (GET_CODE (in) == SUBREG
1575 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1576 / UNITS_PER_WORD)
1577 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1578 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1579 /* Make sure the operand fits in the reg that dies. */
1580 && (GET_MODE_SIZE (rel_mode)
1581 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1582 && HARD_REGNO_MODE_OK (regno, inmode)
1583 && HARD_REGNO_MODE_OK (regno, outmode))
1584 {
1585 unsigned int offs;
1586 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1587 hard_regno_nregs[regno][outmode]);
1588
1589 for (offs = 0; offs < nregs; offs++)
1590 if (fixed_regs[regno + offs]
1591 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1592 regno + offs))
1593 break;
1594
1595 if (offs == nregs
1596 && (! (refers_to_regno_for_reload_p
1597 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1598 || can_reload_into (in, regno, inmode)))
1599 {
1600 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1601 break;
1602 }
1603 }
1604 }
1605
1606 if (out)
1607 output_reloadnum = i;
1608
1609 return i;
1610 }
1611
1612 /* Record an additional place we must replace a value
1613 for which we have already recorded a reload.
1614 RELOADNUM is the value returned by push_reload
1615 when the reload was recorded.
1616 This is used in insn patterns that use match_dup. */
1617
1618 static void
1619 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1620 {
1621 if (replace_reloads)
1622 {
1623 struct replacement *r = &replacements[n_replacements++];
1624 r->what = reloadnum;
1625 r->where = loc;
1626 r->subreg_loc = 0;
1627 r->mode = mode;
1628 }
1629 }
1630
1631 /* Duplicate any replacement we have recorded to apply at
1632 location ORIG_LOC to also be performed at DUP_LOC.
1633 This is used in insn patterns that use match_dup. */
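/* Illustrative sketch (the pattern below is invented, not taken from any
machine description): in something like
(set (match_operand:SI 0 "memory_operand" "=m")
(neg:SI (match_dup 0)))
the same operand appears at two locations; dup_replacements copies any
replacement recorded for the match_operand location to the match_dup
location, so that subst_reloads rewrites both occurrences. */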
1634
1635 static void
1636 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1637 {
1638 int i, n = n_replacements;
1639
1640 for (i = 0; i < n; i++)
1641 {
1642 struct replacement *r = &replacements[i];
1643 if (r->where == orig_loc)
1644 push_replacement (dup_loc, r->what, r->mode);
1645 }
1646 }
1647 \f
1648 /* Transfer all replacements that used to be in reload FROM to be in
1649 reload TO. */
1650
1651 void
1652 transfer_replacements (int to, int from)
1653 {
1654 int i;
1655
1656 for (i = 0; i < n_replacements; i++)
1657 if (replacements[i].what == from)
1658 replacements[i].what = to;
1659 }
1660 \f
1661 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1662 or a subpart of it. If we have any replacements registered for IN_RTX,
1663 cancel the reloads that were supposed to load them.
1664 Return nonzero if we canceled any reloads. */
1665 int
1666 remove_address_replacements (rtx in_rtx)
1667 {
1668 int i, j;
1669 char reload_flags[MAX_RELOADS];
1670 int something_changed = 0;
1671
1672 memset (reload_flags, 0, sizeof reload_flags);
1673 for (i = 0, j = 0; i < n_replacements; i++)
1674 {
1675 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1676 reload_flags[replacements[i].what] |= 1;
1677 else
1678 {
1679 replacements[j++] = replacements[i];
1680 reload_flags[replacements[i].what] |= 2;
1681 }
1682 }
1683 /* Note that the following store must be done before the recursive calls. */
1684 n_replacements = j;
1685
1686 for (i = n_reloads - 1; i >= 0; i--)
1687 {
1688 if (reload_flags[i] == 1)
1689 {
1690 deallocate_reload_reg (i);
1691 remove_address_replacements (rld[i].in);
1692 rld[i].in = 0;
1693 something_changed = 1;
1694 }
1695 }
1696 return something_changed;
1697 }
1698 \f
1699 /* If there is only one output reload, and it is not for an earlyclobber
1700 operand, try to combine it with a (logically unrelated) input reload
1701 to reduce the number of reload registers needed.
1702
1703 This is safe if the input reload does not appear in
1704 the value being output-reloaded, because this implies
1705 it is not needed any more once the original insn completes.
1706
1707 If that doesn't work, see if we can use any of the registers that
1708 die in this insn as a reload register. We can if it is of the right
1709 class and does not appear in the value being output-reloaded. */
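/* A hedged example (the pseudo numbers are invented): for
(set (reg:SI 70) (plus:SI (reg:SI 71) (const_int 4)))
where pseudos 70 and 71 both live on the stack, the input reload of 71
is dead once the insn has executed, so a single reload register can
first hold the reloaded input and then receive the result that is
stored back for 70, saving one spill register. */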
1710
1711 static void
1712 combine_reloads (void)
1713 {
1714 int i, regno;
1715 int output_reload = -1;
1716 int secondary_out = -1;
1717 rtx note;
1718
1719 /* Find the output reload; return unless there is exactly one
1720 and that one is mandatory. */
1721
1722 for (i = 0; i < n_reloads; i++)
1723 if (rld[i].out != 0)
1724 {
1725 if (output_reload >= 0)
1726 return;
1727 output_reload = i;
1728 }
1729
1730 if (output_reload < 0 || rld[output_reload].optional)
1731 return;
1732
1733 /* An input-output reload isn't combinable. */
1734
1735 if (rld[output_reload].in != 0)
1736 return;
1737
1738 /* If this reload is for an earlyclobber operand, we can't do anything. */
1739 if (earlyclobber_operand_p (rld[output_reload].out))
1740 return;
1741
1742 /* If there is a reload for part of the address of this operand, we would
1743 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1744 its life to the point where doing this combine would not lower the
1745 number of spill registers needed. */
1746 for (i = 0; i < n_reloads; i++)
1747 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1748 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1749 && rld[i].opnum == rld[output_reload].opnum)
1750 return;
1751
1752 /* Check each input reload; can we combine it? */
1753
1754 for (i = 0; i < n_reloads; i++)
1755 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1756 /* Life span of this reload must not extend past main insn. */
1757 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1758 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1759 && rld[i].when_needed != RELOAD_OTHER
1760 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1761 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1762 rld[output_reload].outmode))
1763 && rld[i].inc == 0
1764 && rld[i].reg_rtx == 0
1765 #ifdef SECONDARY_MEMORY_NEEDED
1766 /* Don't combine two reloads with different secondary
1767 memory locations. */
1768 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1769 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1770 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1771 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1772 #endif
1773 && (SMALL_REGISTER_CLASSES
1774 ? (rld[i].rclass == rld[output_reload].rclass)
1775 : (reg_class_subset_p (rld[i].rclass,
1776 rld[output_reload].rclass)
1777 || reg_class_subset_p (rld[output_reload].rclass,
1778 rld[i].rclass)))
1779 && (MATCHES (rld[i].in, rld[output_reload].out)
1780 /* Args reversed because the first arg seems to be
1781 the one that we imagine being modified
1782 while the second is the one that might be affected. */
1783 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1784 rld[i].in)
1785 /* However, if the input is a register that appears inside
1786 the output, then we also can't share.
1787 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1788 If the same reload reg is used for both reg 69 and the
1789 result to be stored in memory, then that result
1790 will clobber the address of the memory ref. */
1791 && ! (REG_P (rld[i].in)
1792 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1793 rld[output_reload].out))))
1794 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1795 rld[i].when_needed != RELOAD_FOR_INPUT)
1796 && (reg_class_size[(int) rld[i].rclass]
1797 || SMALL_REGISTER_CLASSES)
1798 /* We will allow making things slightly worse by combining an
1799 input and an output, but no worse than that. */
1800 && (rld[i].when_needed == RELOAD_FOR_INPUT
1801 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1802 {
1803 int j;
1804
1805 /* We have found a reload to combine with! */
1806 rld[i].out = rld[output_reload].out;
1807 rld[i].out_reg = rld[output_reload].out_reg;
1808 rld[i].outmode = rld[output_reload].outmode;
1809 /* Mark the old output reload as inoperative. */
1810 rld[output_reload].out = 0;
1811 /* The combined reload is needed for the entire insn. */
1812 rld[i].when_needed = RELOAD_OTHER;
1813 /* If the output reload had a secondary reload, copy it. */
1814 if (rld[output_reload].secondary_out_reload != -1)
1815 {
1816 rld[i].secondary_out_reload
1817 = rld[output_reload].secondary_out_reload;
1818 rld[i].secondary_out_icode
1819 = rld[output_reload].secondary_out_icode;
1820 }
1821
1822 #ifdef SECONDARY_MEMORY_NEEDED
1823 /* Copy any secondary MEM. */
1824 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1825 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1826 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1827 #endif
1828 /* If required, minimize the register class. */
1829 if (reg_class_subset_p (rld[output_reload].rclass,
1830 rld[i].rclass))
1831 rld[i].rclass = rld[output_reload].rclass;
1832
1833 /* Transfer all replacements from the old reload to the combined. */
1834 for (j = 0; j < n_replacements; j++)
1835 if (replacements[j].what == output_reload)
1836 replacements[j].what = i;
1837
1838 return;
1839 }
1840
1841 /* If this insn has only one operand that is modified or written (assumed
1842 to be the first), it must be the one corresponding to this reload. It
1843 is safe to use anything that dies in this insn for that output provided
1844 that it does not occur in the output (we already know it isn't an
1845 earlyclobber). If this is an asm insn, give up. */
1846
1847 if (INSN_CODE (this_insn) == -1)
1848 return;
1849
1850 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1851 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1852 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1853 return;
1854
1855 /* See if some hard register that dies in this insn and is not used in
1856 the output is the right class. Only works if the register we pick
1857 up can fully hold our output reload. */
1858 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1859 if (REG_NOTE_KIND (note) == REG_DEAD
1860 && REG_P (XEXP (note, 0))
1861 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1862 rld[output_reload].out)
1863 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1864 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1865 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1866 regno)
1867 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1868 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1869 /* Ensure that a secondary or tertiary reload for this output
1870 won't want this register. */
1871 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1872 || (!(TEST_HARD_REG_BIT
1873 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1874 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1875 || !(TEST_HARD_REG_BIT
1876 (reg_class_contents[(int) rld[secondary_out].rclass],
1877 regno)))))
1878 && !fixed_regs[regno]
1879 /* Check that a former pseudo is valid; see find_dummy_reload. */
1880 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1881 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1882 ORIGINAL_REGNO (XEXP (note, 0)))
1883 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1884 {
1885 rld[output_reload].reg_rtx
1886 = gen_rtx_REG (rld[output_reload].outmode, regno);
1887 return;
1888 }
1889 }
1890 \f
1891 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1892 See if one of IN and OUT is a register that may be used;
1893 this is desirable since a spill-register won't be needed.
1894 If so, return the register rtx that proves acceptable.
1895
1896 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1897 RCLASS is the register class required for the reload.
1898
1899 If FOR_REAL is >= 0, it is the number of the reload,
1900 and in some cases when it can be discovered that OUT doesn't need
1901 to be computed, clear out rld[FOR_REAL].out.
1902
1903 If FOR_REAL is -1, this should not be done, because this call
1904 is just to see if a register can be found, not to find and install it.
1905
1906 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1907 puts an additional constraint on being able to use IN for OUT since
1908 IN must not appear elsewhere in the insn (it is assumed that IN itself
1909 is safe from the earlyclobber). */
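/* A hedged illustration (the operand details are invented): if two operands
must match, IN is pseudo (reg:SI 72) living on the stack, OUT is hard
register (reg:SI 3), and reg 3 is not otherwise referenced in the insn,
then OUT itself can serve as the reload register: 72 is loaded directly
into reg 3 and no separate spill register is needed. */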
1910
1911 static rtx
1912 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1913 enum machine_mode inmode, enum machine_mode outmode,
1914 enum reg_class rclass, int for_real, int earlyclobber)
1915 {
1916 rtx in = real_in;
1917 rtx out = real_out;
1918 int in_offset = 0;
1919 int out_offset = 0;
1920 rtx value = 0;
1921
1922 /* If operands exceed a word, we can't use either of them
1923 unless they have the same size. */
1924 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1925 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1926 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1927 return 0;
1928
1929 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1930 respectively refers to a hard register. */
1931
1932 /* Find the inside of any subregs. */
1933 while (GET_CODE (out) == SUBREG)
1934 {
1935 if (REG_P (SUBREG_REG (out))
1936 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1937 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1938 GET_MODE (SUBREG_REG (out)),
1939 SUBREG_BYTE (out),
1940 GET_MODE (out));
1941 out = SUBREG_REG (out);
1942 }
1943 while (GET_CODE (in) == SUBREG)
1944 {
1945 if (REG_P (SUBREG_REG (in))
1946 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1947 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1948 GET_MODE (SUBREG_REG (in)),
1949 SUBREG_BYTE (in),
1950 GET_MODE (in));
1951 in = SUBREG_REG (in);
1952 }
1953
1954 /* Narrow down the reg class, the same way push_reload will;
1955 otherwise we might find a dummy now, but push_reload won't. */
1956 {
1957 enum reg_class preferred_class = PREFERRED_RELOAD_CLASS (in, rclass);
1958 if (preferred_class != NO_REGS)
1959 rclass = preferred_class;
1960 }
1961
1962 /* See if OUT will do. */
1963 if (REG_P (out)
1964 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1965 {
1966 unsigned int regno = REGNO (out) + out_offset;
1967 unsigned int nwords = hard_regno_nregs[regno][outmode];
1968 rtx saved_rtx;
1969
1970 /* When we consider whether the insn uses OUT,
1971 ignore references within IN. They don't prevent us
1972 from copying IN into OUT, because those refs would
1973 move into the insn that reloads IN.
1974
1975 However, we only ignore IN in its role as this reload.
1976 If the insn uses IN elsewhere and it contains OUT,
1977 that counts. We can't be sure it's the "same" operand
1978 so it might not go through this reload. */
1979 saved_rtx = *inloc;
1980 *inloc = const0_rtx;
1981
1982 if (regno < FIRST_PSEUDO_REGISTER
1983 && HARD_REGNO_MODE_OK (regno, outmode)
1984 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1985 PATTERN (this_insn), outloc))
1986 {
1987 unsigned int i;
1988
1989 for (i = 0; i < nwords; i++)
1990 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1991 regno + i))
1992 break;
1993
1994 if (i == nwords)
1995 {
1996 if (REG_P (real_out))
1997 value = real_out;
1998 else
1999 value = gen_rtx_REG (outmode, regno);
2000 }
2001 }
2002
2003 *inloc = saved_rtx;
2004 }
2005
2006 /* Consider using IN if OUT was not acceptable
2007 or if OUT dies in this insn (like the quotient in a divmod insn).
2008 We can't use IN unless it dies in this insn,
2009 which means we must know accurately which hard regs are live.
2010 Also, the result can't go in IN if IN is used within OUT,
2011 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2012 if (hard_regs_live_known
2013 && REG_P (in)
2014 && REGNO (in) < FIRST_PSEUDO_REGISTER
2015 && (value == 0
2016 || find_reg_note (this_insn, REG_UNUSED, real_out))
2017 && find_reg_note (this_insn, REG_DEAD, real_in)
2018 && !fixed_regs[REGNO (in)]
2019 && HARD_REGNO_MODE_OK (REGNO (in),
2020 /* The only case where out and real_out might
2021 have different modes is where real_out
2022 is a subreg, and in that case, out
2023 has a real mode. */
2024 (GET_MODE (out) != VOIDmode
2025 ? GET_MODE (out) : outmode))
2026 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2027 /* However only do this if we can be sure that this input
2028 operand doesn't correspond with an uninitialized pseudo.
2029 global can assign some hardreg to it that is the same as
2030 the one assigned to a different, also live pseudo (as it
2031 can ignore the conflict). We must never introduce writes
2032 to such hardregs, as they would clobber the other live
2033 pseudo. See PR 20973. */
2034 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2035 ORIGINAL_REGNO (in))
2036 /* Similarly, only do this if we can be sure that the death
2037 note is still valid. global can assign some hardreg to
2038 the pseudo referenced in the note and simultaneously a
2039 subword of this hardreg to a different, also live pseudo,
2040 because only another subword of the hardreg is actually
2041 used in the insn. This cannot happen if the pseudo has
2042 been assigned exactly one hardreg. See PR 33732. */
2043 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2044 {
2045 unsigned int regno = REGNO (in) + in_offset;
2046 unsigned int nwords = hard_regno_nregs[regno][inmode];
2047
2048 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2049 && ! hard_reg_set_here_p (regno, regno + nwords,
2050 PATTERN (this_insn))
2051 && (! earlyclobber
2052 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2053 PATTERN (this_insn), inloc)))
2054 {
2055 unsigned int i;
2056
2057 for (i = 0; i < nwords; i++)
2058 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2059 regno + i))
2060 break;
2061
2062 if (i == nwords)
2063 {
2064 /* If we were going to use OUT as the reload reg
2065 and changed our mind, it means OUT is a dummy that
2066 dies here. So don't bother copying value to it. */
2067 if (for_real >= 0 && value == real_out)
2068 rld[for_real].out = 0;
2069 if (REG_P (real_in))
2070 value = real_in;
2071 else
2072 value = gen_rtx_REG (inmode, regno);
2073 }
2074 }
2075 }
2076
2077 return value;
2078 }
2079 \f
2080 /* This page contains subroutines used mainly for determining
2081 whether the IN or an OUT of a reload can serve as the
2082 reload register. */
2083
2084 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2085
2086 int
2087 earlyclobber_operand_p (rtx x)
2088 {
2089 int i;
2090
2091 for (i = 0; i < n_earlyclobbers; i++)
2092 if (reload_earlyclobbers[i] == x)
2093 return 1;
2094
2095 return 0;
2096 }
2097
2098 /* Return 1 if expression X alters a hard reg in the range
2099 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2100 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2101 X should be the body of an instruction. */
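/* Example (assuming DImode occupies two consecutive hard registers): with
BEG_REGNO 0 and END_REGNO 2, a body containing (set (reg:DI 1) ...)
returns 1, since the DImode store covers regs 1 and 2 and therefore
overlaps the range [0, 2). */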
2102
2103 static int
2104 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2105 {
2106 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2107 {
2108 rtx op0 = SET_DEST (x);
2109
2110 while (GET_CODE (op0) == SUBREG)
2111 op0 = SUBREG_REG (op0);
2112 if (REG_P (op0))
2113 {
2114 unsigned int r = REGNO (op0);
2115
2116 /* See if this reg overlaps range under consideration. */
2117 if (r < end_regno
2118 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2119 return 1;
2120 }
2121 }
2122 else if (GET_CODE (x) == PARALLEL)
2123 {
2124 int i = XVECLEN (x, 0) - 1;
2125
2126 for (; i >= 0; i--)
2127 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2128 return 1;
2129 }
2130
2131 return 0;
2132 }
2133
2134 /* Return 1 if ADDR is a valid memory address for mode MODE
2135 in address space AS, and check that each pseudo reg has the
2136 proper kind of hard reg. */
2137
2138 int
2139 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2140 rtx addr, addr_space_t as)
2141 {
2142 #ifdef GO_IF_LEGITIMATE_ADDRESS
2143 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2144 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2145 return 0;
2146
2147 win:
2148 return 1;
2149 #else
2150 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2151 #endif
2152 }
2153 \f
2154 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2155 if they are the same hard reg, and has special hacks for
2156 autoincrement and autodecrement.
2157 This is specifically intended for find_reloads to use
2158 in determining whether two operands match.
2159 X is the operand whose number is the lower of the two.
2160
2161 The value is 2 if Y contains a pre-increment that matches
2162 a non-incrementing address in X. */
2163
2164 /* ??? To be completely correct, we should arrange to pass
2165 for X the output operand and for Y the input operand.
2166 For now, we assume that the output operand has the lower number
2167 because that is natural in (SET output (... input ...)). */
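/* Illustrative examples (hard register numbers are arbitrary):
(reg:SI 3) and (subreg:SI (reg:DI 3) 0) match when both resolve to the
same hard register. If X is (mem:SI (reg:SI 4)) and Y is
(mem:SI (pre_inc:SI (reg:SI 4))), the result is 2: the operands match,
but only through a pre-increment on Y's side. */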
2168
2169 int
2170 operands_match_p (rtx x, rtx y)
2171 {
2172 int i;
2173 RTX_CODE code = GET_CODE (x);
2174 const char *fmt;
2175 int success_2;
2176
2177 if (x == y)
2178 return 1;
2179 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2180 && (REG_P (y) || (GET_CODE (y) == SUBREG
2181 && REG_P (SUBREG_REG (y)))))
2182 {
2183 int j;
2184
2185 if (code == SUBREG)
2186 {
2187 i = REGNO (SUBREG_REG (x));
2188 if (i >= FIRST_PSEUDO_REGISTER)
2189 goto slow;
2190 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2191 GET_MODE (SUBREG_REG (x)),
2192 SUBREG_BYTE (x),
2193 GET_MODE (x));
2194 }
2195 else
2196 i = REGNO (x);
2197
2198 if (GET_CODE (y) == SUBREG)
2199 {
2200 j = REGNO (SUBREG_REG (y));
2201 if (j >= FIRST_PSEUDO_REGISTER)
2202 goto slow;
2203 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2204 GET_MODE (SUBREG_REG (y)),
2205 SUBREG_BYTE (y),
2206 GET_MODE (y));
2207 }
2208 else
2209 j = REGNO (y);
2210
2211 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2212 multiple hard register group of scalar integer registers, so that
2213 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2214 register. */
2215 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2216 && SCALAR_INT_MODE_P (GET_MODE (x))
2217 && i < FIRST_PSEUDO_REGISTER)
2218 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2219 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2220 && SCALAR_INT_MODE_P (GET_MODE (y))
2221 && j < FIRST_PSEUDO_REGISTER)
2222 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2223
2224 return i == j;
2225 }
2226 /* If two operands must match, because they are really a single
2227 operand of an assembler insn, then two postincrements are invalid
2228 because the assembler insn would increment only once.
2229 On the other hand, a postincrement matches ordinary indexing
2230 if the postincrement is the output operand. */
2231 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2232 return operands_match_p (XEXP (x, 0), y);
2233 /* Two preincrements are invalid
2234 because the assembler insn would increment only once.
2235 On the other hand, a preincrement matches ordinary indexing
2236 if the preincrement is the input operand.
2237 In this case, return 2, since some callers need to do special
2238 things when this happens. */
2239 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2240 || GET_CODE (y) == PRE_MODIFY)
2241 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2242
2243 slow:
2244
2245 /* Now we have disposed of all the cases in which different rtx codes
2246 can match. */
2247 if (code != GET_CODE (y))
2248 return 0;
2249
2250 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2251 if (GET_MODE (x) != GET_MODE (y))
2252 return 0;
2253
2254 /* MEMs referring to different address spaces are not equivalent. */
2255 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2256 return 0;
2257
2258 switch (code)
2259 {
2260 case CONST_INT:
2261 case CONST_DOUBLE:
2262 case CONST_FIXED:
2263 return 0;
2264
2265 case LABEL_REF:
2266 return XEXP (x, 0) == XEXP (y, 0);
2267 case SYMBOL_REF:
2268 return XSTR (x, 0) == XSTR (y, 0);
2269
2270 default:
2271 break;
2272 }
2273
2274 /* Compare the elements. If any pair of corresponding elements
2275 fail to match, return 0 for the whole thing. */
2276
2277 success_2 = 0;
2278 fmt = GET_RTX_FORMAT (code);
2279 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2280 {
2281 int val, j;
2282 switch (fmt[i])
2283 {
2284 case 'w':
2285 if (XWINT (x, i) != XWINT (y, i))
2286 return 0;
2287 break;
2288
2289 case 'i':
2290 if (XINT (x, i) != XINT (y, i))
2291 return 0;
2292 break;
2293
2294 case 'e':
2295 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2296 if (val == 0)
2297 return 0;
2298 /* If any subexpression returns 2,
2299 we should return 2 if we are successful. */
2300 if (val == 2)
2301 success_2 = 1;
2302 break;
2303
2304 case '0':
2305 break;
2306
2307 case 'E':
2308 if (XVECLEN (x, i) != XVECLEN (y, i))
2309 return 0;
2310 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2311 {
2312 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2313 if (val == 0)
2314 return 0;
2315 if (val == 2)
2316 success_2 = 1;
2317 }
2318 break;
2319
2320 /* It is believed that rtx's at this level will never
2321 contain anything but integers and other rtx's,
2322 except for within LABEL_REFs and SYMBOL_REFs. */
2323 default:
2324 gcc_unreachable ();
2325 }
2326 }
2327 return 1 + success_2;
2328 }
2329 \f
2330 /* Describe the range of registers or memory referenced by X.
2331 If X is a register, set REG_FLAG and put the first register
2332 number into START and the last plus one into END.
2333 If X is a memory reference, put a base address into BASE
2334 and a range of integer offsets into START and END.
2335 If X is pushing on the stack, we can assume it causes no trouble,
2336 so we set the SAFE field. */
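/* For example, assuming SImode is 4 bytes wide,
(mem:SI (plus:SI (reg:SI 6) (const_int 4))) decomposes into
base (reg:SI 6), start 4, end 8, while a push through
(pre_dec (reg sp)) is simply marked safe. */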
2337
2338 static struct decomposition
2339 decompose (rtx x)
2340 {
2341 struct decomposition val;
2342 int all_const = 0;
2343
2344 memset (&val, 0, sizeof (val));
2345
2346 switch (GET_CODE (x))
2347 {
2348 case MEM:
2349 {
2350 rtx base = NULL_RTX, offset = 0;
2351 rtx addr = XEXP (x, 0);
2352
2353 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2354 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2355 {
2356 val.base = XEXP (addr, 0);
2357 val.start = -GET_MODE_SIZE (GET_MODE (x));
2358 val.end = GET_MODE_SIZE (GET_MODE (x));
2359 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2360 return val;
2361 }
2362
2363 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2364 {
2365 if (GET_CODE (XEXP (addr, 1)) == PLUS
2366 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2367 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2368 {
2369 val.base = XEXP (addr, 0);
2370 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2371 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2372 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2373 return val;
2374 }
2375 }
2376
2377 if (GET_CODE (addr) == CONST)
2378 {
2379 addr = XEXP (addr, 0);
2380 all_const = 1;
2381 }
2382 if (GET_CODE (addr) == PLUS)
2383 {
2384 if (CONSTANT_P (XEXP (addr, 0)))
2385 {
2386 base = XEXP (addr, 1);
2387 offset = XEXP (addr, 0);
2388 }
2389 else if (CONSTANT_P (XEXP (addr, 1)))
2390 {
2391 base = XEXP (addr, 0);
2392 offset = XEXP (addr, 1);
2393 }
2394 }
2395
2396 if (offset == 0)
2397 {
2398 base = addr;
2399 offset = const0_rtx;
2400 }
2401 if (GET_CODE (offset) == CONST)
2402 offset = XEXP (offset, 0);
2403 if (GET_CODE (offset) == PLUS)
2404 {
2405 if (CONST_INT_P (XEXP (offset, 0)))
2406 {
2407 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2408 offset = XEXP (offset, 0);
2409 }
2410 else if (CONST_INT_P (XEXP (offset, 1)))
2411 {
2412 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2413 offset = XEXP (offset, 1);
2414 }
2415 else
2416 {
2417 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2418 offset = const0_rtx;
2419 }
2420 }
2421 else if (!CONST_INT_P (offset))
2422 {
2423 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2424 offset = const0_rtx;
2425 }
2426
2427 if (all_const && GET_CODE (base) == PLUS)
2428 base = gen_rtx_CONST (GET_MODE (base), base);
2429
2430 gcc_assert (CONST_INT_P (offset));
2431
2432 val.start = INTVAL (offset);
2433 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2434 val.base = base;
2435 }
2436 break;
2437
2438 case REG:
2439 val.reg_flag = 1;
2440 val.start = true_regnum (x);
2441 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2442 {
2443 /* A pseudo with no hard reg. */
2444 val.start = REGNO (x);
2445 val.end = val.start + 1;
2446 }
2447 else
2448 /* A hard reg. */
2449 val.end = end_hard_regno (GET_MODE (x), val.start);
2450 break;
2451
2452 case SUBREG:
2453 if (!REG_P (SUBREG_REG (x)))
2454 /* This could be more precise, but it's good enough. */
2455 return decompose (SUBREG_REG (x));
2456 val.reg_flag = 1;
2457 val.start = true_regnum (x);
2458 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2459 return decompose (SUBREG_REG (x));
2460 else
2461 /* A hard reg. */
2462 val.end = val.start + subreg_nregs (x);
2463 break;
2464
2465 case SCRATCH:
2466 /* This hasn't been assigned yet, so it can't conflict yet. */
2467 val.safe = 1;
2468 break;
2469
2470 default:
2471 gcc_assert (CONSTANT_P (x));
2472 val.safe = 1;
2473 break;
2474 }
2475 return val;
2476 }
2477
2478 /* Return 1 if altering Y will not modify the value of X.
2479 Y is also described by YDATA, which should be decompose (Y). */
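/* For instance, two frame slots such as
(mem:SI (plus:SI (reg fp) (const_int -8))) and
(mem:SI (plus:SI (reg fp) (const_int -4))) share a base, but their
[start, end) ranges (-8..-4 and -4..0 for 4-byte SImode) do not
overlap, so altering one cannot change the value of the other. */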
2480
2481 static int
2482 immune_p (rtx x, rtx y, struct decomposition ydata)
2483 {
2484 struct decomposition xdata;
2485
2486 if (ydata.reg_flag)
2487 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2488 if (ydata.safe)
2489 return 1;
2490
2491 gcc_assert (MEM_P (y));
2492 /* If Y is memory and X is not, Y can't affect X. */
2493 if (!MEM_P (x))
2494 return 1;
2495
2496 xdata = decompose (x);
2497
2498 if (! rtx_equal_p (xdata.base, ydata.base))
2499 {
2500 /* If bases are distinct symbolic constants, there is no overlap. */
2501 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2502 return 1;
2503 /* Constants and stack slots never overlap. */
2504 if (CONSTANT_P (xdata.base)
2505 && (ydata.base == frame_pointer_rtx
2506 || ydata.base == hard_frame_pointer_rtx
2507 || ydata.base == stack_pointer_rtx))
2508 return 1;
2509 if (CONSTANT_P (ydata.base)
2510 && (xdata.base == frame_pointer_rtx
2511 || xdata.base == hard_frame_pointer_rtx
2512 || xdata.base == stack_pointer_rtx))
2513 return 1;
2514 /* If either base is variable, we don't know anything. */
2515 return 0;
2516 }
2517
2518 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2519 }
2520
2521 /* Similar, but calls decompose. */
2522
2523 int
2524 safe_from_earlyclobber (rtx op, rtx clobber)
2525 {
2526 struct decomposition early_data;
2527
2528 early_data = decompose (clobber);
2529 return immune_p (op, clobber, early_data);
2530 }
2531 \f
2532 /* Main entry point of this file: search the body of INSN
2533 for values that need reloading and record them with push_reload.
2534 REPLACE nonzero means record also where the values occur
2535 so that subst_reloads can be used.
2536
2537 IND_LEVELS says how many levels of indirection are supported by this
2538 machine; a value of zero means that a memory reference is not a valid
2539 memory address.
2540
2541 LIVE_KNOWN says we have valid information about which hard
2542 regs are live at each point in the program; this is true when
2543 we are called from global_alloc but false when stupid register
2544 allocation has been done.
2545
2546 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2547 which is nonnegative if the reg has been commandeered for reloading into.
2548 It is copied into STATIC_RELOAD_REG_P and referenced from there
2549 by various subroutines.
2550
2551 Return TRUE if some operands need to be changed, because of swapping
2552 commutative operands, reg_equiv_address substitution, or whatever. */
2553
2554 int
2555 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2556 short *reload_reg_p)
2557 {
2558 int insn_code_number;
2559 int i, j;
2560 int noperands;
2561 /* These start out as the constraints for the insn
2562 and they are chewed up as we consider alternatives. */
2563 const char *constraints[MAX_RECOG_OPERANDS];
2564 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2565 a register. */
2566 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2567 char pref_or_nothing[MAX_RECOG_OPERANDS];
2568 /* Nonzero for a MEM operand whose entire address needs a reload.
2569 May be -1 to indicate the entire address may or may not need a reload. */
2570 int address_reloaded[MAX_RECOG_OPERANDS];
2571 /* Nonzero for an address operand that needs to be completely reloaded.
2572 May be -1 to indicate the entire operand may or may not need a reload. */
2573 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2574 /* Value of enum reload_type to use for operand. */
2575 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2576 /* Value of enum reload_type to use within address of operand. */
2577 enum reload_type address_type[MAX_RECOG_OPERANDS];
2578 /* Save the usage of each operand. */
2579 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2580 int no_input_reloads = 0, no_output_reloads = 0;
2581 int n_alternatives;
2582 enum reg_class this_alternative[MAX_RECOG_OPERANDS];
2583 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2584 char this_alternative_win[MAX_RECOG_OPERANDS];
2585 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2586 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2587 int this_alternative_matches[MAX_RECOG_OPERANDS];
2588 int swapped;
2589 int goal_alternative[MAX_RECOG_OPERANDS];
2590 int this_alternative_number;
2591 int goal_alternative_number = 0;
2592 int operand_reloadnum[MAX_RECOG_OPERANDS];
2593 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2594 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2595 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2596 char goal_alternative_win[MAX_RECOG_OPERANDS];
2597 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2598 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2599 int goal_alternative_swapped;
2600 int best;
2601 int best_small_class_operands_num;
2602 int commutative;
2603 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2604 rtx substed_operand[MAX_RECOG_OPERANDS];
2605 rtx body = PATTERN (insn);
2606 rtx set = single_set (insn);
2607 int goal_earlyclobber = 0, this_earlyclobber;
2608 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2609 int retval = 0;
2610
2611 this_insn = insn;
2612 n_reloads = 0;
2613 n_replacements = 0;
2614 n_earlyclobbers = 0;
2615 replace_reloads = replace;
2616 hard_regs_live_known = live_known;
2617 static_reload_reg_p = reload_reg_p;
2618
2619 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2620 neither are insns that SET cc0. Insns that use CC0 are not allowed
2621 to have any input reloads. */
2622 if (JUMP_P (insn) || CALL_P (insn))
2623 no_output_reloads = 1;
2624
2625 #ifdef HAVE_cc0
2626 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2627 no_input_reloads = 1;
2628 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2629 no_output_reloads = 1;
2630 #endif
2631
2632 #ifdef SECONDARY_MEMORY_NEEDED
2633 /* The eliminated forms of any secondary memory locations are per-insn, so
2634 clear them out here. */
2635
2636 if (secondary_memlocs_elim_used)
2637 {
2638 memset (secondary_memlocs_elim, 0,
2639 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2640 secondary_memlocs_elim_used = 0;
2641 }
2642 #endif
2643
2644 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2645 is cheap to move between them. If it is not, there may not be an insn
2646 to do the copy, so we may need a reload. */
2647 if (GET_CODE (body) == SET
2648 && REG_P (SET_DEST (body))
2649 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2650 && REG_P (SET_SRC (body))
2651 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2652 && REGISTER_MOVE_COST (GET_MODE (SET_SRC (body)),
2653 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2654 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2655 return 0;
2656
2657 extract_insn (insn);
2658
2659 noperands = reload_n_operands = recog_data.n_operands;
2660 n_alternatives = recog_data.n_alternatives;
2661
2662 /* Just return "no reloads" if insn has no operands with constraints. */
2663 if (noperands == 0 || n_alternatives == 0)
2664 return 0;
2665
2666 insn_code_number = INSN_CODE (insn);
2667 this_insn_is_asm = insn_code_number < 0;
2668
2669 memcpy (operand_mode, recog_data.operand_mode,
2670 noperands * sizeof (enum machine_mode));
2671 memcpy (constraints, recog_data.constraints,
2672 noperands * sizeof (const char *));
2673
2674 commutative = -1;
2675
2676 /* If we will need to know, later, whether some pair of operands
2677 are the same, we must compare them now and save the result.
2678 Reloading the base and index registers will clobber them
2679 and afterward they will fail to match. */
2680
2681 for (i = 0; i < noperands; i++)
2682 {
2683 const char *p;
2684 int c;
2685 char *end;
2686
2687 substed_operand[i] = recog_data.operand[i];
2688 p = constraints[i];
2689
2690 modified[i] = RELOAD_READ;
2691
2692 /* Scan this operand's constraint to see if it is an output operand,
2693 an in-out operand, is commutative, or should match another. */
2694
2695 while ((c = *p))
2696 {
2697 p += CONSTRAINT_LEN (c, p);
2698 switch (c)
2699 {
2700 case '=':
2701 modified[i] = RELOAD_WRITE;
2702 break;
2703 case '+':
2704 modified[i] = RELOAD_READ_WRITE;
2705 break;
2706 case '%':
2707 {
2708 /* The last operand should not be marked commutative. */
2709 gcc_assert (i != noperands - 1);
2710
2711 /* We currently only support one commutative pair of
2712 operands. Some existing asm code currently uses more
2713 than one pair. Previously, that would usually work,
2714 but sometimes it would crash the compiler. We
2715 continue supporting that case as well as we can by
2716 silently ignoring all but the first pair. In the
2717 future we may handle it correctly. */
2718 if (commutative < 0)
2719 commutative = i;
2720 else
2721 gcc_assert (this_insn_is_asm);
2722 }
2723 break;
2724 /* Use of ISDIGIT is tempting here, but it may get expensive because
2725 of locale support we don't want. */
2726 case '0': case '1': case '2': case '3': case '4':
2727 case '5': case '6': case '7': case '8': case '9':
2728 {
2729 c = strtoul (p - 1, &end, 10);
2730 p = end;
2731
2732 operands_match[c][i]
2733 = operands_match_p (recog_data.operand[c],
2734 recog_data.operand[i]);
2735
2736 /* An operand may not match itself. */
2737 gcc_assert (c != i);
2738
2739 /* If C can be commuted with C+1, and C might need to match I,
2740 then C+1 might also need to match I. */
2741 if (commutative >= 0)
2742 {
2743 if (c == commutative || c == commutative + 1)
2744 {
2745 int other = c + (c == commutative ? 1 : -1);
2746 operands_match[other][i]
2747 = operands_match_p (recog_data.operand[other],
2748 recog_data.operand[i]);
2749 }
2750 if (i == commutative || i == commutative + 1)
2751 {
2752 int other = i + (i == commutative ? 1 : -1);
2753 operands_match[c][other]
2754 = operands_match_p (recog_data.operand[c],
2755 recog_data.operand[other]);
2756 }
2757 /* Note that C is supposed to be less than I.
2758 No need to consider altering both C and I because in
2759 that case we would alter one into the other. */
2760 }
2761 }
2762 }
2763 }
2764 }
2765
2766 /* Examine each operand that is a memory reference or memory address
2767 and reload parts of the addresses into index registers.
2768 Also here any references to pseudo regs that didn't get hard regs
2769 but are equivalent to constants get replaced in the insn itself
2770 with those constants. Nobody will ever see them again.
2771
2772 Finally, set up the preferred classes of each operand. */
2773
2774 for (i = 0; i < noperands; i++)
2775 {
2776 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2777
2778 address_reloaded[i] = 0;
2779 address_operand_reloaded[i] = 0;
2780 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2781 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2782 : RELOAD_OTHER);
2783 address_type[i]
2784 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2785 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2786 : RELOAD_OTHER);
2787
2788 if (*constraints[i] == 0)
2789 /* Ignore things like match_operator operands. */
2790 ;
2791 else if (constraints[i][0] == 'p'
2792 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2793 {
2794 address_operand_reloaded[i]
2795 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2796 recog_data.operand[i],
2797 recog_data.operand_loc[i],
2798 i, operand_type[i], ind_levels, insn);
2799
2800 /* If we now have a simple operand where we used to have a
2801 PLUS or MULT, re-recognize and try again. */
2802 if ((OBJECT_P (*recog_data.operand_loc[i])
2803 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2804 && (GET_CODE (recog_data.operand[i]) == MULT
2805 || GET_CODE (recog_data.operand[i]) == PLUS))
2806 {
2807 INSN_CODE (insn) = -1;
2808 retval = find_reloads (insn, replace, ind_levels, live_known,
2809 reload_reg_p);
2810 return retval;
2811 }
2812
2813 recog_data.operand[i] = *recog_data.operand_loc[i];
2814 substed_operand[i] = recog_data.operand[i];
2815
2816 /* Address operands are reloaded in their existing mode,
2817 no matter what is specified in the machine description. */
2818 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2819 }
2820 else if (code == MEM)
2821 {
2822 address_reloaded[i]
2823 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2824 recog_data.operand_loc[i],
2825 XEXP (recog_data.operand[i], 0),
2826 &XEXP (recog_data.operand[i], 0),
2827 i, address_type[i], ind_levels, insn);
2828 recog_data.operand[i] = *recog_data.operand_loc[i];
2829 substed_operand[i] = recog_data.operand[i];
2830 }
2831 else if (code == SUBREG)
2832 {
2833 rtx reg = SUBREG_REG (recog_data.operand[i]);
2834 rtx op
2835 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2836 ind_levels,
2837 set != 0
2838 && &SET_DEST (set) == recog_data.operand_loc[i],
2839 insn,
2840 &address_reloaded[i]);
2841
2842 /* If we made a MEM to load (a part of) the stack slot of a pseudo
2843 that didn't get a hard register, emit a USE with a REG_EQUAL
2844 note in front so that we might inherit a previous, possibly
2845 wider reload. */
2846
2847 if (replace
2848 && MEM_P (op)
2849 && REG_P (reg)
2850 && (GET_MODE_SIZE (GET_MODE (reg))
2851 >= GET_MODE_SIZE (GET_MODE (op)))
2852 && reg_equiv_constant[REGNO (reg)] == 0)
2853 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2854 insn),
2855 REG_EQUAL, reg_equiv_memory_loc[REGNO (reg)]);
2856
2857 substed_operand[i] = recog_data.operand[i] = op;
2858 }
2859 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2860 /* We can get a PLUS as an "operand" as a result of register
2861 elimination. See eliminate_regs and gen_reload. We handle
2862 a unary operator by reloading the operand. */
2863 substed_operand[i] = recog_data.operand[i]
2864 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2865 ind_levels, 0, insn,
2866 &address_reloaded[i]);
2867 else if (code == REG)
2868 {
2869 /* This is equivalent to calling find_reloads_toplev.
2870 The code is duplicated for speed.
2871 When we find a pseudo always equivalent to a constant,
2872 we replace it by the constant. We must be sure, however,
2873 that we don't try to replace it in the insn in which it
2874 is being set. */
2875 int regno = REGNO (recog_data.operand[i]);
2876 if (reg_equiv_constant[regno] != 0
2877 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2878 {
2879 /* Record the existing mode so that the check if constants are
2880 allowed will work when operand_mode isn't specified. */
2881
2882 if (operand_mode[i] == VOIDmode)
2883 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2884
2885 substed_operand[i] = recog_data.operand[i]
2886 = reg_equiv_constant[regno];
2887 }
2888 if (reg_equiv_memory_loc[regno] != 0
2889 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
2890 /* We need not give a valid is_set_dest argument since the case
2891 of a constant equivalence was checked above. */
2892 substed_operand[i] = recog_data.operand[i]
2893 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2894 ind_levels, 0, insn,
2895 &address_reloaded[i]);
2896 }
2897 /* If the operand is still a register (we didn't replace it with an
2898 equivalent), get the preferred class to reload it into. */
2899 code = GET_CODE (recog_data.operand[i]);
2900 preferred_class[i]
2901 = ((code == REG && REGNO (recog_data.operand[i])
2902 >= FIRST_PSEUDO_REGISTER)
2903 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2904 : NO_REGS);
2905 pref_or_nothing[i]
2906 = (code == REG
2907 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2908 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2909 }
2910
2911 /* If this is simply a copy from operand 1 to operand 0, merge the
2912 preferred classes for the operands. */
2913 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2914 && recog_data.operand[1] == SET_SRC (set))
2915 {
2916 preferred_class[0] = preferred_class[1]
2917 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2918 pref_or_nothing[0] |= pref_or_nothing[1];
2919 pref_or_nothing[1] |= pref_or_nothing[0];
2920 }
2921
2922 /* Now see what we need for pseudo-regs that didn't get hard regs
2923 or got the wrong kind of hard reg. For this, we must consider
2924 all the operands together against the register constraints. */
2925
2926 best = MAX_RECOG_OPERANDS * 2 + 600;
2927 best_small_class_operands_num = 0;
2928
2929 swapped = 0;
2930 goal_alternative_swapped = 0;
2931 try_swapped:
2932
2933 /* The constraints are made of several alternatives.
2934 Each operand's constraint looks like foo,bar,... with commas
2935 separating the alternatives. The first alternatives for all
2936 operands go together, the second alternatives go together, etc.
2937
2938 First loop over alternatives. */
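/* As a made-up example, if operand 0 has constraint "=r,m" and operand 1
has constraint "ri,r", alternative 0 pairs "=r" with "ri" and
alternative 1 pairs "m" with "r"; each alternative is costed as a
whole below. */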
2939
2940 for (this_alternative_number = 0;
2941 this_alternative_number < n_alternatives;
2942 this_alternative_number++)
2943 {
2944 /* Loop over operands for one constraint alternative. */
2945 /* LOSERS counts those that don't fit this alternative
2946 and would require loading. */
2947 int losers = 0;
2948 /* BAD is set to 1 if some operand can't fit this alternative
2949 even after reloading. */
2950 int bad = 0;
2951 /* REJECT is a count of how undesirable this alternative says it is
2952 if any reloading is required. If the alternative matches exactly
2953 then REJECT is ignored, but otherwise it gets this much
2954 counted against it in addition to the reloading needed. Each
2955 ? counts three times here since we want the disparaging caused by
2956 a bad register class to only count 1/3 as much. */
2957 int reject = 0;
2958
2959 if (!recog_data.alternative_enabled_p[this_alternative_number])
2960 {
2961 int i;
2962
2963 for (i = 0; i < recog_data.n_operands; i++)
2964 constraints[i] = skip_alternative (constraints[i]);
2965
2966 continue;
2967 }
2968
2969 this_earlyclobber = 0;
2970
2971 for (i = 0; i < noperands; i++)
2972 {
2973 const char *p = constraints[i];
2974 char *end;
2975 int len;
2976 int win = 0;
2977 int did_match = 0;
2978 /* 0 => this operand can be reloaded somehow for this alternative. */
2979 int badop = 1;
2980 /* 0 => this operand can be reloaded if the alternative allows regs. */
2981 int winreg = 0;
2982 int c;
2983 int m;
2984 rtx operand = recog_data.operand[i];
2985 int offset = 0;
2986 /* Nonzero means this is a MEM that must be reloaded into a reg
2987 regardless of what the constraint says. */
2988 int force_reload = 0;
2989 int offmemok = 0;
2990 /* Nonzero if a constant forced into memory would be OK for this
2991 operand. */
2992 int constmemok = 0;
2993 int earlyclobber = 0;
2994
2995 /* If the predicate accepts a unary operator, it means that
2996 we need to reload the operand, but do not do this for
2997 match_operator and friends. */
2998 if (UNARY_P (operand) && *p != 0)
2999 operand = XEXP (operand, 0);
3000
3001 /* If the operand is a SUBREG, extract
3002 the REG or MEM (or maybe even a constant) within.
3003 (Constants can occur as a result of reg_equiv_constant.) */
3004
3005 while (GET_CODE (operand) == SUBREG)
3006 {
3007 /* Offset only matters when operand is a REG and
3008 it is a hard reg. This is because it is passed
3009 to reg_fits_class_p if it is a REG, and that function
3010 returns 0 for all pseudos. */
3011 if (REG_P (SUBREG_REG (operand))
3012 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3013 {
3014 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3015 GET_MODE (SUBREG_REG (operand)),
3016 SUBREG_BYTE (operand),
3017 GET_MODE (operand)) < 0)
3018 force_reload = 1;
3019 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3020 GET_MODE (SUBREG_REG (operand)),
3021 SUBREG_BYTE (operand),
3022 GET_MODE (operand));
3023 }
3024 operand = SUBREG_REG (operand);
3025 /* Force reload if this is a constant or PLUS or if there may
3026 be a problem accessing OPERAND in the outer mode. */
3027 if (CONSTANT_P (operand)
3028 || GET_CODE (operand) == PLUS
3029 /* We must force a reload of paradoxical SUBREGs
3030 of a MEM because the alignment of the inner value
3031 may not be enough to do the outer reference. On
3032 big-endian machines, it may also reference outside
3033 the object.
3034
3035 On machines that extend byte operations, if we have a
3036 SUBREG where both the inner and outer modes are no wider
3037 than a word and the inner mode is narrower, is integral,
3038 and gets extended when loaded from memory, combine.c has
3039 made assumptions about the behavior of the machine in such
3040 register accesses. If the data is, in fact, in memory we
3041 must always load using the size assumed to be in the
3042 register and let the insn do the different-sized
3043 accesses.
3044
3045 This is doubly true if WORD_REGISTER_OPERATIONS. In
3046 this case eliminate_regs has left non-paradoxical
3047 subregs for push_reload to see. Make sure it does
3048 by forcing the reload.
3049
3050 ??? When is it right at this stage to have a subreg
3051 of a mem that is _not_ to be handled specially? IMO
3052 those should have been reduced to just a mem. */
3053 || ((MEM_P (operand)
3054 || (REG_P (operand)
3055 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3056 #ifndef WORD_REGISTER_OPERATIONS
3057 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3058 < BIGGEST_ALIGNMENT)
3059 && (GET_MODE_SIZE (operand_mode[i])
3060 > GET_MODE_SIZE (GET_MODE (operand))))
3061 || BYTES_BIG_ENDIAN
3062 #ifdef LOAD_EXTEND_OP
3063 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3064 && (GET_MODE_SIZE (GET_MODE (operand))
3065 <= UNITS_PER_WORD)
3066 && (GET_MODE_SIZE (operand_mode[i])
3067 > GET_MODE_SIZE (GET_MODE (operand)))
3068 && INTEGRAL_MODE_P (GET_MODE (operand))
3069 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3070 #endif
3071 )
3072 #endif
3073 )
3074 )
3075 force_reload = 1;
3076 }
3077
3078 this_alternative[i] = NO_REGS;
3079 this_alternative_win[i] = 0;
3080 this_alternative_match_win[i] = 0;
3081 this_alternative_offmemok[i] = 0;
3082 this_alternative_earlyclobber[i] = 0;
3083 this_alternative_matches[i] = -1;
3084
3085 /* An empty constraint or empty alternative
3086 allows anything which matched the pattern. */
3087 if (*p == 0 || *p == ',')
3088 win = 1, badop = 0;
3089
3090 /* Scan this alternative's specs for this operand;
3091 set WIN if the operand fits any letter in this alternative.
3092 Otherwise, clear BADOP if this operand could
3093 fit some letter after reloads,
3094 or set WINREG if this operand could fit after reloads
3095 provided the constraint allows some registers. */
3096
3097 do
3098 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3099 {
3100 case '\0':
3101 len = 0;
3102 break;
3103 case ',':
3104 c = '\0';
3105 break;
3106
3107 case '=': case '+': case '*':
3108 break;
3109
3110 case '%':
3111 /* We only support one commutative marker, the first
3112 one. We already set commutative above. */
3113 break;
3114
3115 case '?':
3116 reject += 6;
3117 break;
3118
3119 case '!':
3120 reject = 600;
3121 break;
3122
3123 case '#':
3124 /* Ignore rest of this alternative as far as
3125 reloading is concerned. */
3126 do
3127 p++;
3128 while (*p && *p != ',');
3129 len = 0;
3130 break;
3131
3132 case '0': case '1': case '2': case '3': case '4':
3133 case '5': case '6': case '7': case '8': case '9':
3134 m = strtoul (p, &end, 10);
3135 p = end;
3136 len = 0;
3137
3138 this_alternative_matches[i] = m;
3139 /* We are supposed to match a previous operand.
3140 If we do, we win if that one did.
3141 If we do not, count both of the operands as losers.
3142 (This is too conservative, since most of the time
3143 only a single reload insn will be needed to make
3144 the two operands win. As a result, this alternative
3145 may be rejected when it is actually desirable.) */
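/* Editorial example (not in the original sources): for a two-address add
   whose constraints are "=r" for operand 0 and "0" for operand 1, M is 0
   and I is 1 here, and operands_match[0][1] says whether the two rtx's
   are already identical.  */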
3146 if ((swapped && (m != commutative || i != commutative + 1))
3147 /* If we are matching as if two operands were swapped,
3148 also pretend that operands_match had been computed
3149 with swapped.
3150 But if I is the second of those and C is the first,
3151 don't exchange them, because operands_match is valid
3152 only on one side of its diagonal. */
3153 ? (operands_match
3154 [(m == commutative || m == commutative + 1)
3155 ? 2 * commutative + 1 - m : m]
3156 [(i == commutative || i == commutative + 1)
3157 ? 2 * commutative + 1 - i : i])
3158 : operands_match[m][i])
3159 {
3160 /* If we are matching a non-offsettable address where an
3161 offsettable address was expected, then we must reject
3162 this combination, because we can't reload it. */
3163 if (this_alternative_offmemok[m]
3164 && MEM_P (recog_data.operand[m])
3165 && this_alternative[m] == NO_REGS
3166 && ! this_alternative_win[m])
3167 bad = 1;
3168
3169 did_match = this_alternative_win[m];
3170 }
3171 else
3172 {
3173 /* Operands don't match. */
3174 rtx value;
3175 int loc1, loc2;
3176 /* Retroactively mark the operand we had to match
3177 as a loser, if it wasn't already. */
3178 if (this_alternative_win[m])
3179 losers++;
3180 this_alternative_win[m] = 0;
3181 if (this_alternative[m] == NO_REGS)
3182 bad = 1;
3183 /* But count the pair only once in the total badness of
3184 this alternative, if the pair can be a dummy reload.
3185 The pointers in operand_loc are not swapped; swap
3186 them by hand if necessary. */
3187 if (swapped && i == commutative)
3188 loc1 = commutative + 1;
3189 else if (swapped && i == commutative + 1)
3190 loc1 = commutative;
3191 else
3192 loc1 = i;
3193 if (swapped && m == commutative)
3194 loc2 = commutative + 1;
3195 else if (swapped && m == commutative + 1)
3196 loc2 = commutative;
3197 else
3198 loc2 = m;
3199 value
3200 = find_dummy_reload (recog_data.operand[i],
3201 recog_data.operand[m],
3202 recog_data.operand_loc[loc1],
3203 recog_data.operand_loc[loc2],
3204 operand_mode[i], operand_mode[m],
3205 this_alternative[m], -1,
3206 this_alternative_earlyclobber[m]);
3207
3208 if (value != 0)
3209 losers--;
3210 }
3211 /* This can be fixed with reloads if the operand
3212 we are supposed to match can be fixed with reloads. */
3213 badop = 0;
3214 this_alternative[i] = this_alternative[m];
3215
3216 /* If we have to reload this operand and some previous
3217 operand also had to match the same thing as this
3218 operand, we don't know how to do that. So reject this
3219 alternative. */
3220 if (! did_match || force_reload)
3221 for (j = 0; j < i; j++)
3222 if (this_alternative_matches[j]
3223 == this_alternative_matches[i])
3224 badop = 1;
3225 break;
3226
3227 case 'p':
3228 /* All necessary reloads for an address_operand
3229 were handled in find_reloads_address. */
3230 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3231 SCRATCH);
3232 win = 1;
3233 badop = 0;
3234 break;
3235
3236 case TARGET_MEM_CONSTRAINT:
3237 if (force_reload)
3238 break;
3239 if (MEM_P (operand)
3240 || (REG_P (operand)
3241 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3242 && reg_renumber[REGNO (operand)] < 0))
3243 win = 1;
3244 if (CONST_POOL_OK_P (operand))
3245 badop = 0;
3246 constmemok = 1;
3247 break;
3248
3249 case '<':
3250 if (MEM_P (operand)
3251 && ! address_reloaded[i]
3252 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3253 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3254 win = 1;
3255 break;
3256
3257 case '>':
3258 if (MEM_P (operand)
3259 && ! address_reloaded[i]
3260 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3261 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3262 win = 1;
3263 break;
3264
3265 /* Memory operand whose address is not offsettable. */
3266 case 'V':
3267 if (force_reload)
3268 break;
3269 if (MEM_P (operand)
3270 && ! (ind_levels ? offsettable_memref_p (operand)
3271 : offsettable_nonstrict_memref_p (operand))
3272 /* Certain mem addresses will become offsettable
3273 after they themselves are reloaded. This is important;
3274 we don't want our own handling of unoffsettables
3275 to override the handling of reg_equiv_address. */
3276 && !(REG_P (XEXP (operand, 0))
3277 && (ind_levels == 0
3278 || reg_equiv_address[REGNO (XEXP (operand, 0))] != 0)))
3279 win = 1;
3280 break;
3281
3282 /* Memory operand whose address is offsettable. */
3283 case 'o':
3284 if (force_reload)
3285 break;
3286 if ((MEM_P (operand)
3287 /* If IND_LEVELS is nonzero, find_reloads_address won't reload a
3288 pseudo that didn't get a hard reg, so we have to
3289 reject that case. */
3290 && ((ind_levels ? offsettable_memref_p (operand)
3291 : offsettable_nonstrict_memref_p (operand))
3292 /* A reloaded address is offsettable because it is now
3293 just a simple register indirect. */
3294 || address_reloaded[i] == 1))
3295 || (REG_P (operand)
3296 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3297 && reg_renumber[REGNO (operand)] < 0
3298 /* If reg_equiv_address is nonzero, we will be
3299 loading it into a register; hence it will be
3300 offsettable, but we cannot say that reg_equiv_mem
3301 is offsettable without checking. */
3302 && ((reg_equiv_mem[REGNO (operand)] != 0
3303 && offsettable_memref_p (reg_equiv_mem[REGNO (operand)]))
3304 || (reg_equiv_address[REGNO (operand)] != 0))))
3305 win = 1;
3306 if (CONST_POOL_OK_P (operand)
3307 || MEM_P (operand))
3308 badop = 0;
3309 constmemok = 1;
3310 offmemok = 1;
3311 break;
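/* Editorial note (not in the original sources): "offsettable" means a small
   constant can be added to the address and still give a valid address, as
   in (mem (plus (reg) (const_int 4))); autoincrement addresses such as
   (mem (pre_dec (reg))) are typical non-offsettable cases, matched by 'V'
   above instead.  */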
3312
3313 case '&':
3314 /* Output operand that is stored before the need for the
3315 input operands (and their index registers) is over. */
3316 earlyclobber = 1, this_earlyclobber = 1;
3317 break;
3318
3319 case 'E':
3320 case 'F':
3321 if (GET_CODE (operand) == CONST_DOUBLE
3322 || (GET_CODE (operand) == CONST_VECTOR
3323 && (GET_MODE_CLASS (GET_MODE (operand))
3324 == MODE_VECTOR_FLOAT)))
3325 win = 1;
3326 break;
3327
3328 case 'G':
3329 case 'H':
3330 if (GET_CODE (operand) == CONST_DOUBLE
3331 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3332 win = 1;
3333 break;
3334
3335 case 's':
3336 if (CONST_INT_P (operand)
3337 || (GET_CODE (operand) == CONST_DOUBLE
3338 && GET_MODE (operand) == VOIDmode))
3339 break;
3340 case 'i':
3341 if (CONSTANT_P (operand)
3342 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3343 win = 1;
3344 break;
3345
3346 case 'n':
3347 if (CONST_INT_P (operand)
3348 || (GET_CODE (operand) == CONST_DOUBLE
3349 && GET_MODE (operand) == VOIDmode))
3350 win = 1;
3351 break;
3352
3353 case 'I':
3354 case 'J':
3355 case 'K':
3356 case 'L':
3357 case 'M':
3358 case 'N':
3359 case 'O':
3360 case 'P':
3361 if (CONST_INT_P (operand)
3362 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3363 win = 1;
3364 break;
3365
3366 case 'X':
3367 force_reload = 0;
3368 win = 1;
3369 break;
3370
3371 case 'g':
3372 if (! force_reload
3373 /* A PLUS is never a valid operand, but reload can make
3374 it from a register when eliminating registers. */
3375 && GET_CODE (operand) != PLUS
3376 /* A SCRATCH is not a valid operand. */
3377 && GET_CODE (operand) != SCRATCH
3378 && (! CONSTANT_P (operand)
3379 || ! flag_pic
3380 || LEGITIMATE_PIC_OPERAND_P (operand))
3381 && (GENERAL_REGS == ALL_REGS
3382 || !REG_P (operand)
3383 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3384 && reg_renumber[REGNO (operand)] < 0)))
3385 win = 1;
3386 /* Drop through into 'r' case. */
3387
3388 case 'r':
3389 this_alternative[i]
3390 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3391 goto reg;
3392
3393 default:
3394 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3395 {
3396 #ifdef EXTRA_CONSTRAINT_STR
3397 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3398 {
3399 if (force_reload)
3400 break;
3401 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3402 win = 1;
3403 /* If the address was already reloaded,
3404 we win as well. */
3405 else if (MEM_P (operand)
3406 && address_reloaded[i] == 1)
3407 win = 1;
3408 /* Likewise if the address will be reloaded because
3409 reg_equiv_address is nonzero. For reg_equiv_mem
3410 we have to check. */
3411 else if (REG_P (operand)
3412 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3413 && reg_renumber[REGNO (operand)] < 0
3414 && ((reg_equiv_mem[REGNO (operand)] != 0
3415 && EXTRA_CONSTRAINT_STR (reg_equiv_mem[REGNO (operand)], c, p))
3416 || (reg_equiv_address[REGNO (operand)] != 0)))
3417 win = 1;
3418
3419 /* If we didn't already win, we can reload
3420 constants via force_const_mem, and other
3421 MEMs by reloading the address like for 'o'. */
3422 if (CONST_POOL_OK_P (operand)
3423 || MEM_P (operand))
3424 badop = 0;
3425 constmemok = 1;
3426 offmemok = 1;
3427 break;
3428 }
3429 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3430 {
3431 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3432 win = 1;
3433
3434 /* If we didn't already win, we can reload
3435 the address into a base register. */
3436 this_alternative[i] = base_reg_class (VOIDmode,
3437 ADDRESS,
3438 SCRATCH);
3439 badop = 0;
3440 break;
3441 }
3442
3443 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3444 win = 1;
3445 #endif
3446 break;
3447 }
3448
3449 this_alternative[i]
3450 = (reg_class_subunion
3451 [this_alternative[i]]
3452 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3453 reg:
3454 if (GET_MODE (operand) == BLKmode)
3455 break;
3456 winreg = 1;
3457 if (REG_P (operand)
3458 && reg_fits_class_p (operand, this_alternative[i],
3459 offset, GET_MODE (recog_data.operand[i])))
3460 win = 1;
3461 break;
3462 }
3463 while ((p += len), c);
3464
3465 constraints[i] = p;
3466
3467 /* If this operand could be handled with a reg,
3468 and some reg is allowed, then this operand can be handled. */
3469 if (winreg && this_alternative[i] != NO_REGS)
3470 badop = 0;
3471
3472 /* Record which operands fit this alternative. */
3473 this_alternative_earlyclobber[i] = earlyclobber;
3474 if (win && ! force_reload)
3475 this_alternative_win[i] = 1;
3476 else if (did_match && ! force_reload)
3477 this_alternative_match_win[i] = 1;
3478 else
3479 {
3480 int const_to_mem = 0;
3481
3482 this_alternative_offmemok[i] = offmemok;
3483 losers++;
3484 if (badop)
3485 bad = 1;
3486 /* Alternative loses if it has no regs for a reg operand. */
3487 if (REG_P (operand)
3488 && this_alternative[i] == NO_REGS
3489 && this_alternative_matches[i] < 0)
3490 bad = 1;
3491
3492 /* If this is a constant that is reloaded into the desired
3493 class by copying it to memory first, count that as another
3494 reload. This is consistent with other code and is
3495 required to avoid choosing another alternative when
3496 the constant is moved into memory by this function on
3497 an early reload pass. Note that the test here is
3498 precisely the same as in the code below that calls
3499 force_const_mem. */
3500 if (CONST_POOL_OK_P (operand)
3501 && ((PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3502 == NO_REGS)
3503 || no_input_reloads)
3504 && operand_mode[i] != VOIDmode)
3505 {
3506 const_to_mem = 1;
3507 if (this_alternative[i] != NO_REGS)
3508 losers++;
3509 }
3510
3511 /* Alternative loses if it requires a type of reload not
3512 permitted for this insn. We can always reload SCRATCH
3513 and objects with a REG_UNUSED note. */
3514 if (GET_CODE (operand) != SCRATCH
3515 && modified[i] != RELOAD_READ && no_output_reloads
3516 && ! find_reg_note (insn, REG_UNUSED, operand))
3517 bad = 1;
3518 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3519 && ! const_to_mem)
3520 bad = 1;
3521
3522 /* If we can't reload this value at all, reject this
3523 alternative. Note that we could also lose due to
3524 LIMIT_RELOAD_CLASS, but we don't check that
3525 here. */
3526
3527 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3528 {
3529 if (PREFERRED_RELOAD_CLASS (operand, this_alternative[i])
3530 == NO_REGS)
3531 reject = 600;
3532
3533 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
3534 if (operand_type[i] == RELOAD_FOR_OUTPUT
3535 && (PREFERRED_OUTPUT_RELOAD_CLASS (operand,
3536 this_alternative[i])
3537 == NO_REGS))
3538 reject = 600;
3539 #endif
3540 }
3541
3542 /* We prefer to reload pseudos over reloading other things,
3543 since such reloads may be able to be eliminated later.
3544 If we are reloading a SCRATCH, we won't be generating any
3545 insns, just using a register, so it is also preferred.
3546 So bump REJECT in other cases. Don't do this in the
3547 case where we are forcing a constant into memory and
3548 it will then win, since we don't want a different
3549 alternative to match in that case. */
3550 if (! (REG_P (operand)
3551 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3552 && GET_CODE (operand) != SCRATCH
3553 && ! (const_to_mem && constmemok))
3554 reject += 2;
3555
3556 /* Input reloads can be inherited more often than output
3557 reloads can be removed, so penalize output reloads. */
3558 if (operand_type[i] != RELOAD_FOR_INPUT
3559 && GET_CODE (operand) != SCRATCH)
3560 reject++;
3561 }
3562
3563 /* If this operand is a pseudo register that didn't get a hard
3564 reg and this alternative accepts some register, see if the
3565 class that we want is a subset of the preferred class for this
3566 register. If not, but it intersects that class, use the
3567 preferred class instead. If it does not intersect the preferred
3568 class, show that usage of this alternative should be discouraged;
3569 it will be discouraged more still if the register is `preferred
3570 or nothing'. We do this because it increases the chance of
3571 reusing our spill register in a later insn and avoiding a pair
3572 of memory stores and loads.
3573
3574 Don't bother with this if this alternative will accept this
3575 operand.
3576
3577 Don't do this for a multiword operand, since it is only a
3578 small win and has the risk of requiring more spill registers,
3579 which could cause a large loss.
3580
3581 Don't do this if the preferred class has only one register
3582 because we might otherwise exhaust the class. */
3583
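/* Editorial example (not in the original sources): if this alternative
   allows GENERAL_REGS but the pseudo's preferred class is a smaller class
   contained in GENERAL_REGS, the code below narrows the reload class to
   that preferred class; if the two classes do not nest either way, the
   alternative is only penalized, and more heavily so when the register is
   "preferred or nothing".  */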
3584 if (! win && ! did_match
3585 && this_alternative[i] != NO_REGS
3586 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3587 && reg_class_size [(int) preferred_class[i]] > 0
3588 && ! SMALL_REGISTER_CLASS_P (preferred_class[i]))
3589 {
3590 if (! reg_class_subset_p (this_alternative[i],
3591 preferred_class[i]))
3592 {
3593 /* Since we don't have a way of forming the intersection,
3594 we just do something special if the preferred class
3595 is a subset of the class we have; that's the most
3596 common case anyway. */
3597 if (reg_class_subset_p (preferred_class[i],
3598 this_alternative[i]))
3599 this_alternative[i] = preferred_class[i];
3600 else
3601 reject += (2 + 2 * pref_or_nothing[i]);
3602 }
3603 }
3604 }
3605
3606 /* Now see if any output operands that are marked "earlyclobber"
3607 in this alternative conflict with any input operands
3608 or any memory addresses. */
3609
3610 for (i = 0; i < noperands; i++)
3611 if (this_alternative_earlyclobber[i]
3612 && (this_alternative_win[i] || this_alternative_match_win[i]))
3613 {
3614 struct decomposition early_data;
3615
3616 early_data = decompose (recog_data.operand[i]);
3617
3618 gcc_assert (modified[i] != RELOAD_READ);
3619
3620 if (this_alternative[i] == NO_REGS)
3621 {
3622 this_alternative_earlyclobber[i] = 0;
3623 gcc_assert (this_insn_is_asm);
3624 error_for_asm (this_insn,
3625 "%<&%> constraint used with no register class");
3626 }
3627
3628 for (j = 0; j < noperands; j++)
3629 /* Is this an input operand or a memory ref? */
3630 if ((MEM_P (recog_data.operand[j])
3631 || modified[j] != RELOAD_WRITE)
3632 && j != i
3633 /* Ignore things like match_operator operands. */
3634 && *recog_data.constraints[j] != 0
3635 /* Don't count an input operand that is constrained to match
3636 the early clobber operand. */
3637 && ! (this_alternative_matches[j] == i
3638 && rtx_equal_p (recog_data.operand[i],
3639 recog_data.operand[j]))
3640 /* Is it altered by storing the earlyclobber operand? */
3641 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3642 early_data))
3643 {
3644 /* If the output is in a non-empty few-regs class,
3645 it's costly to reload it, so reload the input instead. */
3646 if (SMALL_REGISTER_CLASS_P (this_alternative[i])
3647 && (REG_P (recog_data.operand[j])
3648 || GET_CODE (recog_data.operand[j]) == SUBREG))
3649 {
3650 losers++;
3651 this_alternative_win[j] = 0;
3652 this_alternative_match_win[j] = 0;
3653 }
3654 else
3655 break;
3656 }
3657 /* If an earlyclobber operand conflicts with something,
3658 it must be reloaded, so request this and count the cost. */
3659 if (j != noperands)
3660 {
3661 losers++;
3662 this_alternative_win[i] = 0;
3663 this_alternative_match_win[j] = 0;
3664 for (j = 0; j < noperands; j++)
3665 if (this_alternative_matches[j] == i
3666 && this_alternative_match_win[j])
3667 {
3668 this_alternative_win[j] = 0;
3669 this_alternative_match_win[j] = 0;
3670 losers++;
3671 }
3672 }
3673 }
3674
3675 /* If one alternative accepts all the operands with no reload required,
3676 choose that alternative; don't consider the remaining ones. */
3677 if (losers == 0)
3678 {
3679 /* Unswap these so that they are never swapped at `finish'. */
3680 if (commutative >= 0)
3681 {
3682 recog_data.operand[commutative] = substed_operand[commutative];
3683 recog_data.operand[commutative + 1]
3684 = substed_operand[commutative + 1];
3685 }
3686 for (i = 0; i < noperands; i++)
3687 {
3688 goal_alternative_win[i] = this_alternative_win[i];
3689 goal_alternative_match_win[i] = this_alternative_match_win[i];
3690 goal_alternative[i] = this_alternative[i];
3691 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3692 goal_alternative_matches[i] = this_alternative_matches[i];
3693 goal_alternative_earlyclobber[i]
3694 = this_alternative_earlyclobber[i];
3695 }
3696 goal_alternative_number = this_alternative_number;
3697 goal_alternative_swapped = swapped;
3698 goal_earlyclobber = this_earlyclobber;
3699 goto finish;
3700 }
3701
3702 /* REJECT, set by the ! and ? constraint characters and when a register
3703 would be reloaded into a non-preferred class, discourages the use of
3704 this alternative for a reload goal. REJECT is incremented by six
3705 for each ? and two for each non-preferred class. */
3706 losers = losers * 6 + reject;
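/* Editorial worked example (not in the original sources): an alternative
   needing two reloads and containing one '?' scores 2 * 6 + 6 = 18, the
   same as three clean reloads; an alternative containing '!' starts from
   REJECT = 600 and is chosen only when everything else is even worse.  */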
3707
3708 /* If this alternative can be made to work by reloading,
3709 and it needs less reloading than the others checked so far,
3710 record it as the chosen goal for reloading. */
3711 if (! bad)
3712 {
3713 bool change_p = false;
3714 int small_class_operands_num = 0;
3715
3716 if (best >= losers)
3717 {
3718 for (i = 0; i < noperands; i++)
3719 small_class_operands_num
3720 += SMALL_REGISTER_CLASS_P (this_alternative[i]) ? 1 : 0;
3721 if (best > losers
3722 || (best == losers
3723 /* If the cost of the reloads is the same,
3724 prefer the alternative which requires the
3725 minimal number of small register classes for
3726 the operands. This improves the chances of
3727 reloads for insns requiring small register
3728 classes. */
3729 && (small_class_operands_num
3730 < best_small_class_operands_num)))
3731 change_p = true;
3732 }
3733 if (change_p)
3734 {
3735 for (i = 0; i < noperands; i++)
3736 {
3737 goal_alternative[i] = this_alternative[i];
3738 goal_alternative_win[i] = this_alternative_win[i];
3739 goal_alternative_match_win[i]
3740 = this_alternative_match_win[i];
3741 goal_alternative_offmemok[i]
3742 = this_alternative_offmemok[i];
3743 goal_alternative_matches[i] = this_alternative_matches[i];
3744 goal_alternative_earlyclobber[i]
3745 = this_alternative_earlyclobber[i];
3746 }
3747 goal_alternative_swapped = swapped;
3748 best = losers;
3749 best_small_class_operands_num = small_class_operands_num;
3750 goal_alternative_number = this_alternative_number;
3751 goal_earlyclobber = this_earlyclobber;
3752 }
3753 }
3754 }
3755
3756 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3757 then we need to try each alternative twice,
3758 the second time matching those two operands
3759 as if we had exchanged them.
3760 To do this, really exchange them in operands.
3761
3762 If we have just tried the alternatives the second time,
3763 return operands to normal and drop through. */
3764
3765 if (commutative >= 0)
3766 {
3767 swapped = !swapped;
3768 if (swapped)
3769 {
3770 enum reg_class tclass;
3771 int t;
3772
3773 recog_data.operand[commutative] = substed_operand[commutative + 1];
3774 recog_data.operand[commutative + 1] = substed_operand[commutative];
3775 /* Swap the duplicates too. */
3776 for (i = 0; i < recog_data.n_dups; i++)
3777 if (recog_data.dup_num[i] == commutative
3778 || recog_data.dup_num[i] == commutative + 1)
3779 *recog_data.dup_loc[i]
3780 = recog_data.operand[(int) recog_data.dup_num[i]];
3781
3782 tclass = preferred_class[commutative];
3783 preferred_class[commutative] = preferred_class[commutative + 1];
3784 preferred_class[commutative + 1] = tclass;
3785
3786 t = pref_or_nothing[commutative];
3787 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3788 pref_or_nothing[commutative + 1] = t;
3789
3790 t = address_reloaded[commutative];
3791 address_reloaded[commutative] = address_reloaded[commutative + 1];
3792 address_reloaded[commutative + 1] = t;
3793
3794 memcpy (constraints, recog_data.constraints,
3795 noperands * sizeof (const char *));
3796 goto try_swapped;
3797 }
3798 else
3799 {
3800 recog_data.operand[commutative] = substed_operand[commutative];
3801 recog_data.operand[commutative + 1]
3802 = substed_operand[commutative + 1];
3803 /* Unswap the duplicates too. */
3804 for (i = 0; i < recog_data.n_dups; i++)
3805 if (recog_data.dup_num[i] == commutative
3806 || recog_data.dup_num[i] == commutative + 1)
3807 *recog_data.dup_loc[i]
3808 = recog_data.operand[(int) recog_data.dup_num[i]];
3809 }
3810 }
3811
3812 /* The operands don't meet the constraints.
3813 goal_alternative describes the alternative
3814 that we could reach by reloading the fewest operands.
3815 Reload so as to fit it. */
3816
3817 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3818 {
3819 /* No alternative works with reloads?? */
3820 if (insn_code_number >= 0)
3821 fatal_insn ("unable to generate reloads for:", insn);
3822 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3823 /* Avoid further trouble with this insn. */
3824 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3825 n_reloads = 0;
3826 return 0;
3827 }
3828
3829 /* Jump to `finish' from above if all operands are valid already.
3830 In that case, goal_alternative_win is all 1. */
3831 finish:
3832
3833 /* Right now, for any pair of operands I and J that are required to match,
3834 with I < J,
3835 goal_alternative_matches[J] is I.
3836 Set up goal_alternative_matched as the inverse function:
3837 goal_alternative_matched[I] = J. */
3838
3839 for (i = 0; i < noperands; i++)
3840 goal_alternative_matched[i] = -1;
3841
3842 for (i = 0; i < noperands; i++)
3843 if (! goal_alternative_win[i]
3844 && goal_alternative_matches[i] >= 0)
3845 goal_alternative_matched[goal_alternative_matches[i]] = i;
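/* Editorial example (not in the original sources): if operand 1 must match
   operand 0 (goal_alternative_matches[1] == 0) and operand 1 did not win,
   the loop above records goal_alternative_matched[0] = 1.  */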
3846
3847 for (i = 0; i < noperands; i++)
3848 goal_alternative_win[i] |= goal_alternative_match_win[i];
3849
3850 /* If the best alternative is with operands 1 and 2 swapped,
3851 consider them swapped before reporting the reloads. Update the
3852 operand numbers of any reloads already pushed. */
3853
3854 if (goal_alternative_swapped)
3855 {
3856 rtx tem;
3857
3858 tem = substed_operand[commutative];
3859 substed_operand[commutative] = substed_operand[commutative + 1];
3860 substed_operand[commutative + 1] = tem;
3861 tem = recog_data.operand[commutative];
3862 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3863 recog_data.operand[commutative + 1] = tem;
3864 tem = *recog_data.operand_loc[commutative];
3865 *recog_data.operand_loc[commutative]
3866 = *recog_data.operand_loc[commutative + 1];
3867 *recog_data.operand_loc[commutative + 1] = tem;
3868
3869 for (i = 0; i < n_reloads; i++)
3870 {
3871 if (rld[i].opnum == commutative)
3872 rld[i].opnum = commutative + 1;
3873 else if (rld[i].opnum == commutative + 1)
3874 rld[i].opnum = commutative;
3875 }
3876 }
3877
3878 for (i = 0; i < noperands; i++)
3879 {
3880 operand_reloadnum[i] = -1;
3881
3882 /* If this is an earlyclobber operand, we need to widen the scope.
3883 The reload must remain valid from the start of the insn being
3884 reloaded until after the operand is stored into its destination.
3885 We approximate this with RELOAD_OTHER even though we know that we
3886 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3887
3888 One special case that is worth checking is when we have an
3889 output that is earlyclobber but isn't used past the insn (typically
3890 a SCRATCH). In this case, we need only keep the reload live
3891 through the insn itself, but not through any of our input or output
3892 reloads.
3893 But we must not accidentally narrow the scope of an existing
3894 RELOAD_OTHER reload - leave these alone.
3895
3896 In any case, anything needed to address this operand can remain
3897 categorized however it was before. */
3898
3899 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3900 operand_type[i]
3901 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3902 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3903 }
3904
3905 /* Any constants that aren't allowed and can't be reloaded
3906 into registers are here changed into memory references. */
3907 for (i = 0; i < noperands; i++)
3908 if (! goal_alternative_win[i])
3909 {
3910 rtx op = recog_data.operand[i];
3911 rtx subreg = NULL_RTX;
3912 rtx plus = NULL_RTX;
3913 enum machine_mode mode = operand_mode[i];
3914
3915 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3916 push_reload so we have to let them pass here. */
3917 if (GET_CODE (op) == SUBREG)
3918 {
3919 subreg = op;
3920 op = SUBREG_REG (op);
3921 mode = GET_MODE (op);
3922 }
3923
3924 if (GET_CODE (op) == PLUS)
3925 {
3926 plus = op;
3927 op = XEXP (op, 1);
3928 }
3929
3930 if (CONST_POOL_OK_P (op)
3931 && ((PREFERRED_RELOAD_CLASS (op,
3932 (enum reg_class) goal_alternative[i])
3933 == NO_REGS)
3934 || no_input_reloads)
3935 && mode != VOIDmode)
3936 {
3937 int this_address_reloaded;
3938 rtx tem = force_const_mem (mode, op);
3939
3940 /* If we stripped a SUBREG or a PLUS above, add it back. */
3941 if (plus != NULL_RTX)
3942 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3943
3944 if (subreg != NULL_RTX)
3945 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3946
3947 this_address_reloaded = 0;
3948 substed_operand[i] = recog_data.operand[i]
3949 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3950 0, insn, &this_address_reloaded);
3951
3952 /* If the alternative accepts constant pool refs directly
3953 there will be no reload needed at all. */
3954 if (plus == NULL_RTX
3955 && subreg == NULL_RTX
3956 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3957 ? substed_operand[i]
3958 : NULL,
3959 recog_data.constraints[i],
3960 goal_alternative_number))
3961 goal_alternative_win[i] = 1;
3962 }
3963 }
3964
3965 /* Record the values of the earlyclobber operands for the caller. */
3966 if (goal_earlyclobber)
3967 for (i = 0; i < noperands; i++)
3968 if (goal_alternative_earlyclobber[i])
3969 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3970
3971 /* Now record reloads for all the operands that need them. */
3972 for (i = 0; i < noperands; i++)
3973 if (! goal_alternative_win[i])
3974 {
3975 /* Operands that match previous ones have already been handled. */
3976 if (goal_alternative_matches[i] >= 0)
3977 ;
3978 /* Handle an operand with a nonoffsettable address
3979 appearing where an offsettable address will do
3980 by reloading the address into a base register.
3981
3982 ??? We can also do this when the operand is a register and
3983 reg_equiv_mem is not offsettable, but this is a bit tricky,
3984 so we don't bother with it. It may not be worth doing. */
3985 else if (goal_alternative_matched[i] == -1
3986 && goal_alternative_offmemok[i]
3987 && MEM_P (recog_data.operand[i]))
3988 {
3989 /* If the address to be reloaded is a VOIDmode constant,
3990 use Pmode as the mode of the reload register, as would have
3991 been done by find_reloads_address. */
3992 enum machine_mode address_mode;
3993 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3994 if (address_mode == VOIDmode)
3995 address_mode = Pmode;
3996
3997 operand_reloadnum[i]
3998 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3999 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4000 base_reg_class (VOIDmode, MEM, SCRATCH),
4001 address_mode,
4002 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4003 rld[operand_reloadnum[i]].inc
4004 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4005
4006 /* If this operand is an output, we will have made any
4007 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4008 now we are treating part of the operand as an input, so
4009 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4010
4011 if (modified[i] == RELOAD_WRITE)
4012 {
4013 for (j = 0; j < n_reloads; j++)
4014 {
4015 if (rld[j].opnum == i)
4016 {
4017 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4018 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4019 else if (rld[j].when_needed
4020 == RELOAD_FOR_OUTADDR_ADDRESS)
4021 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4022 }
4023 }
4024 }
4025 }
4026 else if (goal_alternative_matched[i] == -1)
4027 {
4028 operand_reloadnum[i]
4029 = push_reload ((modified[i] != RELOAD_WRITE
4030 ? recog_data.operand[i] : 0),
4031 (modified[i] != RELOAD_READ
4032 ? recog_data.operand[i] : 0),
4033 (modified[i] != RELOAD_WRITE
4034 ? recog_data.operand_loc[i] : 0),
4035 (modified[i] != RELOAD_READ
4036 ? recog_data.operand_loc[i] : 0),
4037 (enum reg_class) goal_alternative[i],
4038 (modified[i] == RELOAD_WRITE
4039 ? VOIDmode : operand_mode[i]),
4040 (modified[i] == RELOAD_READ
4041 ? VOIDmode : operand_mode[i]),
4042 (insn_code_number < 0 ? 0
4043 : insn_data[insn_code_number].operand[i].strict_low),
4044 0, i, operand_type[i]);
4045 }
4046 /* In a matching pair of operands, one must be input only
4047 and the other must be output only.
4048 Pass the input operand as IN and the other as OUT. */
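/* Editorial note (not in the original sources): for such a pair a single
   RELOAD_OTHER reload is pushed below; roughly speaking, one reload
   register is loaded from the input operand before the insn and stored to
   the output operand after it.  */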
4049 else if (modified[i] == RELOAD_READ
4050 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4051 {
4052 operand_reloadnum[i]
4053 = push_reload (recog_data.operand[i],
4054 recog_data.operand[goal_alternative_matched[i]],
4055 recog_data.operand_loc[i],
4056 recog_data.operand_loc[goal_alternative_matched[i]],
4057 (enum reg_class) goal_alternative[i],
4058 operand_mode[i],
4059 operand_mode[goal_alternative_matched[i]],
4060 0, 0, i, RELOAD_OTHER);
4061 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4062 }
4063 else if (modified[i] == RELOAD_WRITE
4064 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4065 {
4066 operand_reloadnum[goal_alternative_matched[i]]
4067 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4068 recog_data.operand[i],
4069 recog_data.operand_loc[goal_alternative_matched[i]],
4070 recog_data.operand_loc[i],
4071 (enum reg_class) goal_alternative[i],
4072 operand_mode[goal_alternative_matched[i]],
4073 operand_mode[i],
4074 0, 0, i, RELOAD_OTHER);
4075 operand_reloadnum[i] = output_reloadnum;
4076 }
4077 else
4078 {
4079 gcc_assert (insn_code_number < 0);
4080 error_for_asm (insn, "inconsistent operand constraints "
4081 "in an %<asm%>");
4082 /* Avoid further trouble with this insn. */
4083 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4084 n_reloads = 0;
4085 return 0;
4086 }
4087 }
4088 else if (goal_alternative_matched[i] < 0
4089 && goal_alternative_matches[i] < 0
4090 && address_operand_reloaded[i] != 1
4091 && optimize)
4092 {
4093 /* For each non-matching operand that's a MEM or a pseudo-register
4094 that didn't get a hard register, make an optional reload.
4095 This may get done even if the insn needs no reloads otherwise. */
4096
4097 rtx operand = recog_data.operand[i];
4098
4099 while (GET_CODE (operand) == SUBREG)
4100 operand = SUBREG_REG (operand);
4101 if ((MEM_P (operand)
4102 || (REG_P (operand)
4103 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4104 /* If this is only for an output, the optional reload would not
4105 actually cause us to use a register now; it would just note
4106 that something is stored here. */
4107 && ((enum reg_class) goal_alternative[i] != NO_REGS
4108 || modified[i] == RELOAD_WRITE)
4109 && ! no_input_reloads
4110 /* An optional output reload might allow us to delete INSN later.
4111 We mustn't make in-out reloads on insns that are not permitted
4112 to have output reloads.
4113 If this is an asm, we can't delete it; we must not even call
4114 push_reload for an optional output reload in this case,
4115 because we can't be sure that the constraint allows a register,
4116 and push_reload verifies the constraints for asms. */
4117 && (modified[i] == RELOAD_READ
4118 || (! no_output_reloads && ! this_insn_is_asm)))
4119 operand_reloadnum[i]
4120 = push_reload ((modified[i] != RELOAD_WRITE
4121 ? recog_data.operand[i] : 0),
4122 (modified[i] != RELOAD_READ
4123 ? recog_data.operand[i] : 0),
4124 (modified[i] != RELOAD_WRITE
4125 ? recog_data.operand_loc[i] : 0),
4126 (modified[i] != RELOAD_READ
4127 ? recog_data.operand_loc[i] : 0),
4128 (enum reg_class) goal_alternative[i],
4129 (modified[i] == RELOAD_WRITE
4130 ? VOIDmode : operand_mode[i]),
4131 (modified[i] == RELOAD_READ
4132 ? VOIDmode : operand_mode[i]),
4133 (insn_code_number < 0 ? 0
4134 : insn_data[insn_code_number].operand[i].strict_low),
4135 1, i, operand_type[i]);
4136 /* If a memory reference remains (either as a MEM or a pseudo that
4137 did not get a hard register), yet we can't make an optional
4138 reload, check if this is actually a pseudo register reference;
4139 we then need to emit a USE and/or a CLOBBER so that reload
4140 inheritance will do the right thing. */
4141 else if (replace
4142 && (MEM_P (operand)
4143 || (REG_P (operand)
4144 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4145 && reg_renumber [REGNO (operand)] < 0)))
4146 {
4147 operand = *recog_data.operand_loc[i];
4148
4149 while (GET_CODE (operand) == SUBREG)
4150 operand = SUBREG_REG (operand);
4151 if (REG_P (operand))
4152 {
4153 if (modified[i] != RELOAD_WRITE)
4154 /* We mark the USE with QImode so that we recognize
4155 it as one that can be safely deleted at the end
4156 of reload. */
4157 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4158 insn), QImode);
4159 if (modified[i] != RELOAD_READ)
4160 emit_insn_after (gen_clobber (operand), insn);
4161 }
4162 }
4163 }
4164 else if (goal_alternative_matches[i] >= 0
4165 && goal_alternative_win[goal_alternative_matches[i]]
4166 && modified[i] == RELOAD_READ
4167 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4168 && ! no_input_reloads && ! no_output_reloads
4169 && optimize)
4170 {
4171 /* Similarly, make an optional reload for a pair of matching
4172 objects that are in MEM or a pseudo that didn't get a hard reg. */
4173
4174 rtx operand = recog_data.operand[i];
4175
4176 while (GET_CODE (operand) == SUBREG)
4177 operand = SUBREG_REG (operand);
4178 if ((MEM_P (operand)
4179 || (REG_P (operand)
4180 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4181 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
4182 != NO_REGS))
4183 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4184 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4185 recog_data.operand[i],
4186 recog_data.operand_loc[goal_alternative_matches[i]],
4187 recog_data.operand_loc[i],
4188 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4189 operand_mode[goal_alternative_matches[i]],
4190 operand_mode[i],
4191 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4192 }
4193
4194 /* Perform whatever substitutions on the operands we are supposed
4195 to make due to commutativity or replacement of registers
4196 with equivalent constants or memory slots. */
4197
4198 for (i = 0; i < noperands; i++)
4199 {
4200 /* We only do this on the last pass through reload, because it is
4201 possible for some data (like reg_equiv_address) to be changed during
4202 later passes. Moreover, we lose the opportunity to get a useful
4203 reload_{in,out}_reg when we do these replacements. */
4204
4205 if (replace)
4206 {
4207 rtx substitution = substed_operand[i];
4208
4209 *recog_data.operand_loc[i] = substitution;
4210
4211 /* If we're replacing an operand with a LABEL_REF, we need to
4212 make sure that there's a REG_LABEL_OPERAND note attached to
4213 this instruction. */
4214 if (GET_CODE (substitution) == LABEL_REF
4215 && !find_reg_note (insn, REG_LABEL_OPERAND,
4216 XEXP (substitution, 0))
4217 /* For a JUMP_P, if it was a branch target it must have
4218 already been recorded as such. */
4219 && (!JUMP_P (insn)
4220 || !label_is_jump_target_p (XEXP (substitution, 0),
4221 insn)))
4222 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4223 }
4224 else
4225 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4226 }
4227
4228 /* If this insn pattern contains any MATCH_DUP's, make sure that
4229 they will be substituted if the operands they match are substituted.
4230 Also do now any substitutions we already did on the operands.
4231
4232 Don't do this if we aren't making replacements because we might be
4233 propagating things allocated by frame pointer elimination into places
4234 it doesn't expect. */
4235
4236 if (insn_code_number >= 0 && replace)
4237 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4238 {
4239 int opno = recog_data.dup_num[i];
4240 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4241 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4242 }
4243
4244 #if 0
4245 /* This loses because reloading of prior insns can invalidate the equivalence
4246 (or at least find_equiv_reg isn't smart enough to find it any more),
4247 causing this insn to need more reload regs than it needed before.
4248 It may be too late to make the reload regs available.
4249 Now this optimization is done safely in choose_reload_regs. */
4250
4251 /* For each reload of a reg into some other class of reg,
4252 search for an existing equivalent reg (same value now) in the right class.
4253 We can use it as long as we don't need to change its contents. */
4254 for (i = 0; i < n_reloads; i++)
4255 if (rld[i].reg_rtx == 0
4256 && rld[i].in != 0
4257 && REG_P (rld[i].in)
4258 && rld[i].out == 0)
4259 {
4260 rld[i].reg_rtx
4261 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4262 static_reload_reg_p, 0, rld[i].inmode);
4263 /* Prevent generation of insn to load the value
4264 because the one we found already has the value. */
4265 if (rld[i].reg_rtx)
4266 rld[i].in = rld[i].reg_rtx;
4267 }
4268 #endif
4269
4270 /* If we detected an error and replaced the asm instruction by a USE,
4271 forget about the reloads. */
4272 if (GET_CODE (PATTERN (insn)) == USE
4273 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4274 n_reloads = 0;
4275
4276 /* Perhaps an output reload can be combined with another
4277 to reduce the number of reloads needed by one. */
4278 if (!goal_earlyclobber)
4279 combine_reloads ();
4280
4281 /* If we have a pair of reloads for parts of an address, they are reloading
4282 the same object, the operands themselves were not reloaded, and they
4283 are for two operands that are supposed to match, merge the reloads and
4284 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4285
4286 for (i = 0; i < n_reloads; i++)
4287 {
4288 int k;
4289
4290 for (j = i + 1; j < n_reloads; j++)
4291 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4292 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4293 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4294 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4295 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4296 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4297 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 && rtx_equal_p (rld[i].in, rld[j].in)
4300 && (operand_reloadnum[rld[i].opnum] < 0
4301 || rld[operand_reloadnum[rld[i].opnum]].optional)
4302 && (operand_reloadnum[rld[j].opnum] < 0
4303 || rld[operand_reloadnum[rld[j].opnum]].optional)
4304 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4305 || (goal_alternative_matches[rld[j].opnum]
4306 == rld[i].opnum)))
4307 {
4308 for (k = 0; k < n_replacements; k++)
4309 if (replacements[k].what == j)
4310 replacements[k].what = i;
4311
4312 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4314 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4315 else
4316 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4317 rld[j].in = 0;
4318 }
4319 }
4320
4321 /* Scan all the reloads and update their type.
4322 If a reload is for the address of an operand and we didn't reload
4323 that operand, change the type. Similarly, change the operand number
4324 of a reload when two operands match. If a reload is optional, treat it
4325 as though the operand isn't reloaded.
4326
4327 ??? This latter case is somewhat odd because if we do the optional
4328 reload, it means the object is hanging around. Thus we need only
4329 do the address reload if the optional reload was NOT done.
4330
4331 Change secondary reloads to be the address type of their operand, not
4332 the normal type.
4333
4334 If an operand's reload is now RELOAD_OTHER, change any
4335 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4336 RELOAD_FOR_OTHER_ADDRESS. */
4337
4338 for (i = 0; i < n_reloads; i++)
4339 {
4340 if (rld[i].secondary_p
4341 && rld[i].when_needed == operand_type[rld[i].opnum])
4342 rld[i].when_needed = address_type[rld[i].opnum];
4343
4344 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4345 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4346 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4347 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4348 && (operand_reloadnum[rld[i].opnum] < 0
4349 || rld[operand_reloadnum[rld[i].opnum]].optional))
4350 {
4351 /* If we have a secondary reload to go along with this reload,
4352 change its type to RELOAD_FOR_OPADDR_ADDR. */
4353
4354 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4355 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4356 && rld[i].secondary_in_reload != -1)
4357 {
4358 int secondary_in_reload = rld[i].secondary_in_reload;
4359
4360 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4361
4362 /* If there's a tertiary reload we have to change it also. */
4363 if (secondary_in_reload > 0
4364 && rld[secondary_in_reload].secondary_in_reload != -1)
4365 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4366 = RELOAD_FOR_OPADDR_ADDR;
4367 }
4368
4369 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4370 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4371 && rld[i].secondary_out_reload != -1)
4372 {
4373 int secondary_out_reload = rld[i].secondary_out_reload;
4374
4375 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4376
4377 /* If there's a tertiary reload we have to change it also. */
4378 if (secondary_out_reload
4379 && rld[secondary_out_reload].secondary_out_reload != -1)
4380 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4381 = RELOAD_FOR_OPADDR_ADDR;
4382 }
4383
4384 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4385 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4386 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4387 else
4388 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4389 }
4390
4391 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4392 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4393 && operand_reloadnum[rld[i].opnum] >= 0
4394 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4395 == RELOAD_OTHER))
4396 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4397
4398 if (goal_alternative_matches[rld[i].opnum] >= 0)
4399 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4400 }
4401
4402 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4403 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4404 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4405
4406 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4407 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4408 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4409 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4410 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4411 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4412 This is complicated by the fact that a single operand can have more
4413 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4414 choose_reload_regs without affecting code quality, and cases that
4415 actually fail are extremely rare, so it turns out to be better to fix
4416 the problem here by not generating cases that choose_reload_regs will
4417 fail for. */
4418 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4419 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4420 a single operand.
4421 We can reduce the register pressure by exploiting the fact that a
4422 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4423 does not conflict with any of them, if it is only used for the first of
4424 the RELOAD_FOR_X_ADDRESS reloads. */
4425 {
4426 int first_op_addr_num = -2;
4427 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4428 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4429 int need_change = 0;
4430 /* We use first_op_addr_num and the contents of the above arrays
4431 first as flags - -2 means no instance encountered, -1 means exactly
4432 one instance encountered.
4433 If more than one instance has been encountered, we store the reload
4434 number of the first reload of the kind in question; reload numbers
4435 are known to be non-negative. */
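/* Editorial note (not in the original sources) on the trick below: starting
   from -2, the first "++x >= 0" test increments the flag to -1 and fails
   (exactly one instance seen); the second increments it to 0 and succeeds,
   at which point the flag is overwritten with the reload number I.  */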
4436 for (i = 0; i < noperands; i++)
4437 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4438 for (i = n_reloads - 1; i >= 0; i--)
4439 {
4440 switch (rld[i].when_needed)
4441 {
4442 case RELOAD_FOR_OPERAND_ADDRESS:
4443 if (++first_op_addr_num >= 0)
4444 {
4445 first_op_addr_num = i;
4446 need_change = 1;
4447 }
4448 break;
4449 case RELOAD_FOR_INPUT_ADDRESS:
4450 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4451 {
4452 first_inpaddr_num[rld[i].opnum] = i;
4453 need_change = 1;
4454 }
4455 break;
4456 case RELOAD_FOR_OUTPUT_ADDRESS:
4457 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4458 {
4459 first_outpaddr_num[rld[i].opnum] = i;
4460 need_change = 1;
4461 }
4462 break;
4463 default:
4464 break;
4465 }
4466 }
4467
4468 if (need_change)
4469 {
4470 for (i = 0; i < n_reloads; i++)
4471 {
4472 int first_num;
4473 enum reload_type type;
4474
4475 switch (rld[i].when_needed)
4476 {
4477 case RELOAD_FOR_OPADDR_ADDR:
4478 first_num = first_op_addr_num;
4479 type = RELOAD_FOR_OPERAND_ADDRESS;
4480 break;
4481 case RELOAD_FOR_INPADDR_ADDRESS:
4482 first_num = first_inpaddr_num[rld[i].opnum];
4483 type = RELOAD_FOR_INPUT_ADDRESS;
4484 break;
4485 case RELOAD_FOR_OUTADDR_ADDRESS:
4486 first_num = first_outpaddr_num[rld[i].opnum];
4487 type = RELOAD_FOR_OUTPUT_ADDRESS;
4488 break;
4489 default:
4490 continue;
4491 }
4492 if (first_num < 0)
4493 continue;
4494 else if (i > first_num)
4495 rld[i].when_needed = type;
4496 else
4497 {
4498 /* Check if the only TYPE reload that uses reload I is
4499 reload FIRST_NUM. */
4500 for (j = n_reloads - 1; j > first_num; j--)
4501 {
4502 if (rld[j].when_needed == type
4503 && (rld[i].secondary_p
4504 ? rld[j].secondary_in_reload == i
4505 : reg_mentioned_p (rld[i].in, rld[j].in)))
4506 {
4507 rld[i].when_needed = type;
4508 break;
4509 }
4510 }
4511 }
4512 }
4513 }
4514 }
4515
4516 /* See if we have any reloads that are now allowed to be merged
4517 because we've changed when the reload is needed to
4518 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4519 check for the most common cases. */
4520
4521 for (i = 0; i < n_reloads; i++)
4522 if (rld[i].in != 0 && rld[i].out == 0
4523 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4524 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4525 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4526 for (j = 0; j < n_reloads; j++)
4527 if (i != j && rld[j].in != 0 && rld[j].out == 0
4528 && rld[j].when_needed == rld[i].when_needed
4529 && MATCHES (rld[i].in, rld[j].in)
4530 && rld[i].rclass == rld[j].rclass
4531 && !rld[i].nocombine && !rld[j].nocombine
4532 && rld[i].reg_rtx == rld[j].reg_rtx)
4533 {
4534 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4535 transfer_replacements (i, j);
4536 rld[j].in = 0;
4537 }
4538
4539 #ifdef HAVE_cc0
4540 /* If we made any reloads for addresses, see if they violate a
4541 "no input reloads" requirement for this insn. But loads that we
4542 do after the insn (such as for output addresses) are fine. */
4543 if (no_input_reloads)
4544 for (i = 0; i < n_reloads; i++)
4545 gcc_assert (rld[i].in == 0
4546 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4547 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4548 #endif
4549
4550 /* Compute reload_mode and reload_nregs. */
4551 for (i = 0; i < n_reloads; i++)
4552 {
4553 rld[i].mode
4554 = (rld[i].inmode == VOIDmode
4555 || (GET_MODE_SIZE (rld[i].outmode)
4556 > GET_MODE_SIZE (rld[i].inmode)))
4557 ? rld[i].outmode : rld[i].inmode;
4558
4559 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4560 }
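/* Editorial example (not in the original sources): an in-out reload with
   inmode SImode and outmode DImode gets rld[i].mode == DImode, and nregs
   is the number of hard registers of the chosen class that a DImode value
   needs on the target.  */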
4561
4562 /* Special case a simple move with an input reload and a
4563 destination of a hard reg: if the hard reg is ok, use it. */
4564 for (i = 0; i < n_reloads; i++)
4565 if (rld[i].when_needed == RELOAD_FOR_INPUT
4566 && GET_CODE (PATTERN (insn)) == SET
4567 && REG_P (SET_DEST (PATTERN (insn)))
4568 && (SET_SRC (PATTERN (insn)) == rld[i].in
4569 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4570 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4571 {
4572 rtx dest = SET_DEST (PATTERN (insn));
4573 unsigned int regno = REGNO (dest);
4574
4575 if (regno < FIRST_PSEUDO_REGISTER
4576 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4577 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4578 {
4579 int nr = hard_regno_nregs[regno][rld[i].mode];
4580 int ok = 1, nri;
4581
4582 for (nri = 1; nri < nr; nri ++)
4583 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4584 ok = 0;
4585
4586 if (ok)
4587 rld[i].reg_rtx = dest;
4588 }
4589 }
4590
4591 return retval;
4592 }
4593
4594 /* Return true if alternative number ALTNUM in constraint-string
4595 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4596 MEM gives the reference if it didn't need any reloads, otherwise it
4597 is null. */
4598
4599 static bool
4600 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4601 const char *constraint, int altnum)
4602 {
4603 int c;
4604
4605 /* Skip alternatives before the one requested. */
4606 while (altnum > 0)
4607 {
4608 while (*constraint++ != ',');
4609 altnum--;
4610 }
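/* Editorial example (not in the original sources): with CONSTRAINT "r,m,o"
   and ALTNUM 2, the loop above leaves CONSTRAINT pointing at the final "o"
   alternative.  */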
4611 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4612 If one of them is present, this alternative accepts the result of
4613 passing a constant-pool reference through find_reloads_toplev.
4614
4615 The same is true of extra memory constraints if the address
4616 was reloaded into a register. However, the target may elect
4617 to disallow the original constant address, forcing it to be
4618 reloaded into a register instead. */
4619 for (; (c = *constraint) && c != ',' && c != '#';
4620 constraint += CONSTRAINT_LEN (c, constraint))
4621 {
4622 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4623 return true;
4624 #ifdef EXTRA_CONSTRAINT_STR
4625 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4626 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4627 return true;
4628 #endif
4629 }
4630 return false;
4631 }
4632 \f
4633 /* Scan X for memory references and scan the addresses for reloading.
4634 Also checks for references to "constant" regs that we want to eliminate
4635 and replaces them with the values they stand for.
4636 We may alter X destructively if it contains a reference to such.
4637 If X is just a constant reg, we return the equivalent value
4638 instead of X.
4639
4640 IND_LEVELS says how many levels of indirect addressing this machine
4641 supports.
4642
4643 OPNUM and TYPE identify the purpose of the reload.
4644
4645 IS_SET_DEST is true if X is the destination of a SET, which is not
4646 appropriate to be replaced by a constant.
4647
4648 INSN, if nonzero, is the insn in which we do the reload. It is used
4649 to determine if we may generate output reloads, and where to put USEs
4650 for pseudos that we have to replace with stack slots.
4651
4652 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4653 result of find_reloads_address. */
4654
4655 static rtx
4656 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4657 int ind_levels, int is_set_dest, rtx insn,
4658 int *address_reloaded)
4659 {
4660 RTX_CODE code = GET_CODE (x);
4661
4662 const char *fmt = GET_RTX_FORMAT (code);
4663 int i;
4664 int copied;
4665
4666 if (code == REG)
4667 {
4668 /* This code is duplicated for speed in find_reloads. */
4669 int regno = REGNO (x);
4670 if (reg_equiv_constant[regno] != 0 && !is_set_dest)
4671 x = reg_equiv_constant[regno];
4672 #if 0
4673 /* This creates (subreg (mem...)) which would cause an unnecessary
4674 reload of the mem. */
4675 else if (reg_equiv_mem[regno] != 0)
4676 x = reg_equiv_mem[regno];
4677 #endif
4678 else if (reg_equiv_memory_loc[regno]
4679 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
4680 {
4681 rtx mem = make_memloc (x, regno);
4682 if (reg_equiv_address[regno]
4683 || ! rtx_equal_p (mem, reg_equiv_mem[regno]))
4684 {
4685 /* If this is not a toplevel operand, find_reloads doesn't see
4686 this substitution. We have to emit a USE of the pseudo so
4687 that delete_output_reload can see it. */
4688 if (replace_reloads && recog_data.operand[opnum] != x)
4689 /* We mark the USE with QImode so that we recognize it
4690 as one that can be safely deleted at the end of
4691 reload. */
4692 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4693 QImode);
4694 x = mem;
4695 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4696 opnum, type, ind_levels, insn);
4697 if (!rtx_equal_p (x, mem))
4698 push_reg_equiv_alt_mem (regno, x);
4699 if (address_reloaded)
4700 *address_reloaded = i;
4701 }
4702 }
4703 return x;
4704 }
4705 if (code == MEM)
4706 {
4707 rtx tem = x;
4708
4709 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4710 opnum, type, ind_levels, insn);
4711 if (address_reloaded)
4712 *address_reloaded = i;
4713
4714 return tem;
4715 }
4716
4717 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4718 {
4719 /* Check for SUBREG containing a REG that's equivalent to a
4720 constant. If the constant has a known value, truncate it
4721 right now. Similarly if we are extracting a single word of a
4722 multi-word constant. If the constant is symbolic, allow it
4723 to be substituted normally. push_reload will strip the
4724 subreg later. The constant must not be VOIDmode, because we
4725 will lose the mode of the register (this should never happen
4726 because one of the cases above should handle it). */
4727
4728 int regno = REGNO (SUBREG_REG (x));
4729 rtx tem;
4730
4731 if (regno >= FIRST_PSEUDO_REGISTER
4732 && reg_renumber[regno] < 0
4733 && reg_equiv_constant[regno] != 0)
4734 {
4735 tem =
4736 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant[regno],
4737 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4738 gcc_assert (tem);
4739 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4740 {
4741 tem = force_const_mem (GET_MODE (x), tem);
4742 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4743 &XEXP (tem, 0), opnum, type,
4744 ind_levels, insn);
4745 if (address_reloaded)
4746 *address_reloaded = i;
4747 }
4748 return tem;
4749 }
4750
4751 /* If the subreg contains a reg that will be converted to a mem,
4752 convert the subreg to a narrower memref now.
4753 Otherwise, we would get (subreg (mem ...) ...),
4754 which would force reload of the mem.
4755
4756 We also need to do this if there is an equivalent MEM that is
4757 not offsettable. In that case, alter_subreg would produce an
4758 invalid address on big-endian machines.
4759
4760 For machines that extend byte loads, we must not reload using
4761 a wider mode if we have a paradoxical SUBREG. find_reloads will
4762 force a reload in that case. So we should not do anything here. */
4763
4764 if (regno >= FIRST_PSEUDO_REGISTER
4765 #ifdef LOAD_EXTEND_OP
4766 && (GET_MODE_SIZE (GET_MODE (x))
4767 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4768 #endif
4769 && (reg_equiv_address[regno] != 0
4770 || (reg_equiv_mem[regno] != 0
4771 && (! strict_memory_address_addr_space_p
4772 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
4773 MEM_ADDR_SPACE (reg_equiv_mem[regno]))
4774 || ! offsettable_memref_p (reg_equiv_mem[regno])
4775 || num_not_at_initial_offset))))
4776 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4777 insn);
4778 }
4779
4780 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4781 {
4782 if (fmt[i] == 'e')
4783 {
4784 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4785 ind_levels, is_set_dest, insn,
4786 address_reloaded);
4787 /* If we have replaced a reg with its equivalent memory loc -
4788 that can still be handled here e.g. if it's in a paradoxical
4789 subreg - we must make the change in a copy, rather than using
4790 a destructive change. This way, find_reloads can still elect
4791 not to do the change. */
4792 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4793 {
4794 x = shallow_copy_rtx (x);
4795 copied = 1;
4796 }
4797 XEXP (x, i) = new_part;
4798 }
4799 }
4800 return x;
4801 }
4802
4803 /* Return a mem ref for the memory equivalent of reg REGNO.
4804 This mem ref is not shared with anything. */
4805
4806 static rtx
4807 make_memloc (rtx ad, int regno)
4808 {
4809 /* We must rerun eliminate_regs, in case the elimination
4810 offsets have changed. */
4811 rtx tem
4812 = XEXP (eliminate_regs (reg_equiv_memory_loc[regno], VOIDmode, NULL_RTX),
4813 0);
4814
4815 /* If TEM might contain a pseudo, we must copy it to avoid
4816 modifying it when we do the substitution for the reload. */
4817 if (rtx_varies_p (tem, 0))
4818 tem = copy_rtx (tem);
4819
4820 tem = replace_equiv_address_nv (reg_equiv_memory_loc[regno], tem);
4821 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4822
4823 /* Copy the result if it's still the same as the equivalence, to avoid
4824 modifying it when we do the substitution for the reload. */
4825 if (tem == reg_equiv_memory_loc[regno])
4826 tem = copy_rtx (tem);
4827 return tem;
4828 }
4829
4830 /* Returns true if AD could be turned into a valid memory reference
4831 to mode MODE in address space AS by reloading the part pointed to
4832 by PART into a register. */
4833
4834 static int
4835 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4836 addr_space_t as, rtx *part)
4837 {
4838 int retv;
4839 rtx tem = *part;
4840 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4841
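/* Illustrative note (added, not from the original sources): the test works
   by temporarily splicing a fresh, as-yet-unallocated register into *PART,
   asking whether the whole address AD would then be acceptable for MODE in
   address space AS, and restoring the original contents before returning;
   AD itself is never changed permanently.  */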
4842 *part = reg;
4843 retv = memory_address_addr_space_p (mode, ad, as);
4844 *part = tem;
4845
4846 return retv;
4847 }
4848
4849 /* Record all reloads needed for handling memory address AD
4850 which appears in *LOC in a memory reference to mode MODE
4851 which itself is found in location *MEMREFLOC.
4852 Note that we take shortcuts assuming that no multi-reg machine mode
4853 occurs as part of an address.
4854
4855 OPNUM and TYPE specify the purpose of this reload.
4856
4857 IND_LEVELS says how many levels of indirect addressing this machine
4858 supports.
4859
4860 INSN, if nonzero, is the insn in which we do the reload. It is used
4861 to determine if we may generate output reloads, and where to put USEs
4862 for pseudos that we have to replace with stack slots.
4863
4864 Value is one if this address is reloaded or replaced as a whole; it is
4865 zero if the top level of this address was not reloaded or replaced, and
4866 it is -1 if it may or may not have been reloaded or replaced.
4867
4868 Note that there is no verification that the address will be valid after
4869 this routine does its work. Instead, we rely on the fact that the address
4870 was valid when reload started. So we need only undo things that reload
4871 could have broken. These are wrong register types, pseudos not allocated
4872 to a hard register, and frame pointer elimination. */
4873
4874 static int
4875 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4876 rtx *loc, int opnum, enum reload_type type,
4877 int ind_levels, rtx insn)
4878 {
4879 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4880 : ADDR_SPACE_GENERIC;
4881 int regno;
4882 int removed_and = 0;
4883 int op_index;
4884 rtx tem;
4885
4886 /* If the address is a register, see if it is a legitimate address and
4887 reload if not. We first handle the cases where we need not reload
4888 or where we must reload in a non-standard way. */
4889
4890 if (REG_P (ad))
4891 {
4892 regno = REGNO (ad);
4893
4894 if (reg_equiv_constant[regno] != 0)
4895 {
4896 find_reloads_address_part (reg_equiv_constant[regno], loc,
4897 base_reg_class (mode, MEM, SCRATCH),
4898 GET_MODE (ad), opnum, type, ind_levels);
4899 return 1;
4900 }
4901
4902 tem = reg_equiv_memory_loc[regno];
4903 if (tem != 0)
4904 {
4905 if (reg_equiv_address[regno] != 0 || num_not_at_initial_offset)
4906 {
4907 tem = make_memloc (ad, regno);
4908 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4909 XEXP (tem, 0),
4910 MEM_ADDR_SPACE (tem)))
4911 {
4912 rtx orig = tem;
4913
4914 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4915 &XEXP (tem, 0), opnum,
4916 ADDR_TYPE (type), ind_levels, insn);
4917 if (!rtx_equal_p (tem, orig))
4918 push_reg_equiv_alt_mem (regno, tem);
4919 }
4920 /* We can avoid a reload if the register's equivalent memory
4921 expression is valid as an indirect memory address.
4922 But not all addresses are valid in a mem used as an indirect
4923 address: only reg or reg+constant. */
4924
4925 if (ind_levels > 0
4926 && strict_memory_address_addr_space_p (mode, tem, as)
4927 && (REG_P (XEXP (tem, 0))
4928 || (GET_CODE (XEXP (tem, 0)) == PLUS
4929 && REG_P (XEXP (XEXP (tem, 0), 0))
4930 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4931 {
4932 /* If TEM is not the same as what we'll be replacing the
4933 pseudo with after reload, put a USE in front of INSN
4934 in the final reload pass. */
4935 if (replace_reloads
4936 && num_not_at_initial_offset
4937 && ! rtx_equal_p (tem, reg_equiv_mem[regno]))
4938 {
4939 *loc = tem;
4940 /* We mark the USE with QImode so that we
4941 recognize it as one that can be safely
4942 deleted at the end of reload. */
4943 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4944 insn), QImode);
4945
4946 /* This doesn't really count as replacing the address
4947 as a whole, since it is still a memory access. */
4948 }
4949 return 0;
4950 }
4951 ad = tem;
4952 }
4953 }
4954
4955 /* The only remaining case where we can avoid a reload is if this is a
4956 hard register that is valid as a base register and which is not the
4957 subject of a CLOBBER in this insn. */
4958
4959 else if (regno < FIRST_PSEUDO_REGISTER
4960 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4961 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4962 return 0;
4963
4964 /* If we do not have one of the cases above, we must do the reload. */
4965 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4966 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4967 return 1;
4968 }
4969
4970 if (strict_memory_address_addr_space_p (mode, ad, as))
4971 {
4972 /* The address appears valid, so reloads are not needed.
4973 But the address may contain an eliminable register.
4974 This can happen because a machine with indirect addressing
4975 may consider a pseudo register by itself a valid address even when
4976 it has failed to get a hard reg.
4977 So do a tree-walk to find and eliminate all such regs. */
4978
4979 /* But first quickly dispose of a common case. */
4980 if (GET_CODE (ad) == PLUS
4981 && CONST_INT_P (XEXP (ad, 1))
4982 && REG_P (XEXP (ad, 0))
4983 && reg_equiv_constant[REGNO (XEXP (ad, 0))] == 0)
4984 return 0;
4985
4986 subst_reg_equivs_changed = 0;
4987 *loc = subst_reg_equivs (ad, insn);
4988
4989 if (! subst_reg_equivs_changed)
4990 return 0;
4991
4992 /* Check result for validity after substitution. */
4993 if (strict_memory_address_addr_space_p (mode, ad, as))
4994 return 0;
4995 }
4996
4997 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4998 do
4999 {
5000 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5001 {
5002 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5003 ind_levels, win);
5004 }
5005 break;
5006 win:
5007 *memrefloc = copy_rtx (*memrefloc);
5008 XEXP (*memrefloc, 0) = ad;
5009 move_replacements (&ad, &XEXP (*memrefloc, 0));
5010 return -1;
5011 }
5012 while (0);
5013 #endif
5014
5015 /* The address is not valid. We have to figure out why. First see if
5016 we have an outer AND and remove it if so. Then analyze what's inside. */
5017
5018 if (GET_CODE (ad) == AND)
5019 {
5020 removed_and = 1;
5021 loc = &XEXP (ad, 0);
5022 ad = *loc;
5023 }
5024
5025 /* One possibility for why the address is invalid is that it is itself
5026 a MEM. This can happen when the frame pointer is being eliminated, a
5027 pseudo is not allocated to a hard register, and the offset between the
5028 frame and stack pointers is not its initial value. In that case the
5029 pseudo will have been replaced by a MEM referring to the
5030 stack pointer. */
5031 if (MEM_P (ad))
5032 {
5033 /* First ensure that the address in this MEM is valid. Then, unless
5034 indirect addresses are valid, reload the MEM into a register. */
5035 tem = ad;
5036 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5037 opnum, ADDR_TYPE (type),
5038 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5039
5040 /* If tem was changed, then we must create a new memory reference to
5041 hold it and store it back into memrefloc. */
5042 if (tem != ad && memrefloc)
5043 {
5044 *memrefloc = copy_rtx (*memrefloc);
5045 copy_replacements (tem, XEXP (*memrefloc, 0));
5046 loc = &XEXP (*memrefloc, 0);
5047 if (removed_and)
5048 loc = &XEXP (*loc, 0);
5049 }
5050
5051 /* Check similar cases as for indirect addresses as above except
5052 that we can allow pseudos and a MEM since they should have been
5053 taken care of above. */
5054
5055 if (ind_levels == 0
5056 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5057 || MEM_P (XEXP (tem, 0))
5058 || ! (REG_P (XEXP (tem, 0))
5059 || (GET_CODE (XEXP (tem, 0)) == PLUS
5060 && REG_P (XEXP (XEXP (tem, 0), 0))
5061 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5062 {
5063 /* Must use TEM here, not AD, since it is the one that will
5064 have any subexpressions reloaded, if needed. */
5065 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5066 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5067 VOIDmode, 0,
5068 0, opnum, type);
5069 return ! removed_and;
5070 }
5071 else
5072 return 0;
5073 }
5074
5075 /* If we have address of a stack slot but it's not valid because the
5076 displacement is too large, compute the sum in a register.
5077 Handle all base registers here, not just fp/ap/sp, because on some
5078 targets (namely SH) we can also get too large displacements from
5079 big-endian corrections. */
5080 else if (GET_CODE (ad) == PLUS
5081 && REG_P (XEXP (ad, 0))
5082 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5083 && CONST_INT_P (XEXP (ad, 1))
5084 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5085 CONST_INT))
5086
5087 {
5088 /* Unshare the MEM rtx so we can safely alter it. */
5089 if (memrefloc)
5090 {
5091 *memrefloc = copy_rtx (*memrefloc);
5092 loc = &XEXP (*memrefloc, 0);
5093 if (removed_and)
5094 loc = &XEXP (*loc, 0);
5095 }
5096
5097 if (double_reg_address_ok)
5098 {
5099 /* Unshare the sum as well. */
5100 *loc = ad = copy_rtx (ad);
5101
5102 /* Reload the displacement into an index reg.
5103 We assume the frame pointer or arg pointer is a base reg. */
5104 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5105 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5106 type, ind_levels);
5107 return 0;
5108 }
5109 else
5110 {
5111 /* If the sum of two regs is not necessarily valid,
5112 reload the sum into a base reg.
5113 That will at least work. */
5114 find_reloads_address_part (ad, loc,
5115 base_reg_class (mode, MEM, SCRATCH),
5116 Pmode, opnum, type, ind_levels);
5117 }
5118 return ! removed_and;
5119 }
5120
5121 /* If we have an indexed stack slot, there are three possible reasons why
5122 it might be invalid: The index might need to be reloaded, the address
5123 might have been made by frame pointer elimination and hence have a
5124 constant out of range, or both reasons might apply.
5125
5126 We can easily check for an index needing reload, but even if that is the
5127 case, we might also have an invalid constant. To avoid making the
5128 conservative assumption and requiring two reloads, we see if this address
5129 is valid when not interpreted strictly. If it is, the only problem is
5130 that the index needs a reload and find_reloads_address_1 will take care
5131 of it.
5132
5133 Handle all base registers here, not just fp/ap/sp, because on some
5134 targets (namely SPARC) we can also get invalid addresses from preventive
5135 subreg big-endian corrections made by find_reloads_toplev. We
5136 can also get expressions involving LO_SUM (rather than PLUS) from
5137 find_reloads_subreg_address.
5138
5139 If we decide to do something, it must be that `double_reg_address_ok'
5140 is true. We generate a reload of the base register + constant and
5141 rework the sum so that the reload register will be added to the index.
5142 This is safe because we know the address isn't shared.
5143
5144 We check for the base register as both the first and second operand of
5145 the innermost PLUS and/or LO_SUM. */
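/* Illustrative example (added, not from the original comment): given an
   address such as (plus (plus (reg fp) (reg idx)) (const_int 4096)) whose
   displacement is out of range, the loop below reloads
   (plus (reg fp) (const_int 4096)) into a register R and rewrites the
   address as (plus (reg R) (reg idx)), relying on `double_reg_address_ok'.  */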
5146
5147 for (op_index = 0; op_index < 2; ++op_index)
5148 {
5149 rtx operand, addend;
5150 enum rtx_code inner_code;
5151
5152 if (GET_CODE (ad) != PLUS)
5153 continue;
5154
5155 inner_code = GET_CODE (XEXP (ad, 0));
5156 if (!(GET_CODE (ad) == PLUS
5157 && CONST_INT_P (XEXP (ad, 1))
5158 && (inner_code == PLUS || inner_code == LO_SUM)))
5159 continue;
5160
5161 operand = XEXP (XEXP (ad, 0), op_index);
5162 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5163 continue;
5164
5165 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5166
5167 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5168 GET_CODE (addend))
5169 || operand == frame_pointer_rtx
5170 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5171 || operand == hard_frame_pointer_rtx
5172 #endif
5173 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5174 || operand == arg_pointer_rtx
5175 #endif
5176 || operand == stack_pointer_rtx)
5177 && ! maybe_memory_address_addr_space_p
5178 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5179 {
5180 rtx offset_reg;
5181 enum reg_class cls;
5182
5183 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5184
5185 /* Form the adjusted address. */
5186 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5187 ad = gen_rtx_PLUS (GET_MODE (ad),
5188 op_index == 0 ? offset_reg : addend,
5189 op_index == 0 ? addend : offset_reg);
5190 else
5191 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5192 op_index == 0 ? offset_reg : addend,
5193 op_index == 0 ? addend : offset_reg);
5194 *loc = ad;
5195
5196 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5197 find_reloads_address_part (XEXP (ad, op_index),
5198 &XEXP (ad, op_index), cls,
5199 GET_MODE (ad), opnum, type, ind_levels);
5200 find_reloads_address_1 (mode,
5201 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5202 GET_CODE (XEXP (ad, op_index)),
5203 &XEXP (ad, 1 - op_index), opnum,
5204 type, 0, insn);
5205
5206 return 0;
5207 }
5208 }
5209
5210 /* See if address becomes valid when an eliminable register
5211 in a sum is replaced. */
5212
5213 tem = ad;
5214 if (GET_CODE (ad) == PLUS)
5215 tem = subst_indexed_address (ad);
5216 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5217 {
5218 /* Ok, we win that way. Replace any additional eliminable
5219 registers. */
5220
5221 subst_reg_equivs_changed = 0;
5222 tem = subst_reg_equivs (tem, insn);
5223
5224 /* Make sure that didn't make the address invalid again. */
5225
5226 if (! subst_reg_equivs_changed
5227 || strict_memory_address_addr_space_p (mode, tem, as))
5228 {
5229 *loc = tem;
5230 return 0;
5231 }
5232 }
5233
5234 /* If constants aren't valid addresses, reload the constant address
5235 into a register. */
5236 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5237 {
5238 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5239 Unshare it so we can safely alter it. */
5240 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5241 && CONSTANT_POOL_ADDRESS_P (ad))
5242 {
5243 *memrefloc = copy_rtx (*memrefloc);
5244 loc = &XEXP (*memrefloc, 0);
5245 if (removed_and)
5246 loc = &XEXP (*loc, 0);
5247 }
5248
5249 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5250 Pmode, opnum, type, ind_levels);
5251 return ! removed_and;
5252 }
5253
5254 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5255 ind_levels, insn);
5256 }
5257 \f
5258 /* Find all pseudo regs appearing in AD
5259 that are eliminable in favor of equivalent values
5260 and do not have hard regs; replace them by their equivalents.
5261 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5262 front of it for pseudos that we have to replace with stack slots. */
5263
5264 static rtx
5265 subst_reg_equivs (rtx ad, rtx insn)
5266 {
5267 RTX_CODE code = GET_CODE (ad);
5268 int i;
5269 const char *fmt;
5270
5271 switch (code)
5272 {
5273 case HIGH:
5274 case CONST_INT:
5275 case CONST:
5276 case CONST_DOUBLE:
5277 case CONST_FIXED:
5278 case CONST_VECTOR:
5279 case SYMBOL_REF:
5280 case LABEL_REF:
5281 case PC:
5282 case CC0:
5283 return ad;
5284
5285 case REG:
5286 {
5287 int regno = REGNO (ad);
5288
5289 if (reg_equiv_constant[regno] != 0)
5290 {
5291 subst_reg_equivs_changed = 1;
5292 return reg_equiv_constant[regno];
5293 }
5294 if (reg_equiv_memory_loc[regno] && num_not_at_initial_offset)
5295 {
5296 rtx mem = make_memloc (ad, regno);
5297 if (! rtx_equal_p (mem, reg_equiv_mem[regno]))
5298 {
5299 subst_reg_equivs_changed = 1;
5300 /* We mark the USE with QImode so that we recognize it
5301 as one that can be safely deleted at the end of
5302 reload. */
5303 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5304 QImode);
5305 return mem;
5306 }
5307 }
5308 }
5309 return ad;
5310
5311 case PLUS:
5312 /* Quickly dispose of a common case. */
5313 if (XEXP (ad, 0) == frame_pointer_rtx
5314 && CONST_INT_P (XEXP (ad, 1)))
5315 return ad;
5316 break;
5317
5318 default:
5319 break;
5320 }
5321
5322 fmt = GET_RTX_FORMAT (code);
5323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5324 if (fmt[i] == 'e')
5325 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5326 return ad;
5327 }
5328 \f
5329 /* Compute the sum of X and Y, making canonicalizations assumed in an
5330 address, namely: sum constant integers, surround the sum of two
5331 constants with a CONST, put the constant as the second operand, and
5332 group the constant on the outermost sum.
5333
5334 This routine assumes both inputs are already in canonical form. */
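/* For example (illustrative, added): form_sum on (plus (reg 65) (const_int 4))
   and (const_int 8) is expected to yield (plus (reg 65) (const_int 12)),
   while the sum of two constants is wrapped in a CONST as described above.  */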
5335
5336 rtx
5337 form_sum (rtx x, rtx y)
5338 {
5339 rtx tem;
5340 enum machine_mode mode = GET_MODE (x);
5341
5342 if (mode == VOIDmode)
5343 mode = GET_MODE (y);
5344
5345 if (mode == VOIDmode)
5346 mode = Pmode;
5347
5348 if (CONST_INT_P (x))
5349 return plus_constant (y, INTVAL (x));
5350 else if (CONST_INT_P (y))
5351 return plus_constant (x, INTVAL (y));
5352 else if (CONSTANT_P (x))
5353 tem = x, x = y, y = tem;
5354
5355 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5356 return form_sum (XEXP (x, 0), form_sum (XEXP (x, 1), y));
5357
5358 /* Note that if the operands of Y are specified in the opposite
5359 order in the recursive calls below, infinite recursion will occur. */
5360 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5361 return form_sum (form_sum (x, XEXP (y, 0)), XEXP (y, 1));
5362
5363 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5364 constant will have been placed second. */
5365 if (CONSTANT_P (x) && CONSTANT_P (y))
5366 {
5367 if (GET_CODE (x) == CONST)
5368 x = XEXP (x, 0);
5369 if (GET_CODE (y) == CONST)
5370 y = XEXP (y, 0);
5371
5372 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5373 }
5374
5375 return gen_rtx_PLUS (mode, x, y);
5376 }
5377 \f
5378 /* If ADDR is a sum containing a pseudo register that should be
5379 replaced with a constant (from reg_equiv_constant),
5380 return the result of doing so, and also apply the associative
5381 law so that the result is more likely to be a valid address.
5382 (But it is not guaranteed to be one.)
5383
5384 Note that at most one register is replaced, even if more are
5385 replaceable. Also, we try to put the result into a canonical form
5386 so it is more likely to be a valid address.
5387
5388 In all other cases, return ADDR. */
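/* Illustrative example (added): if pseudo (reg 70) got no hard register and
   is equivalent to (const_int 16), then (plus (reg 70) (reg 66)) becomes
   (plus (reg 66) (const_int 16)) via form_sum; only one register is
   replaced per call, as noted above.  */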
5389
5390 static rtx
5391 subst_indexed_address (rtx addr)
5392 {
5393 rtx op0 = 0, op1 = 0, op2 = 0;
5394 rtx tem;
5395 int regno;
5396
5397 if (GET_CODE (addr) == PLUS)
5398 {
5399 /* Try to find a register to replace. */
5400 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5401 if (REG_P (op0)
5402 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5403 && reg_renumber[regno] < 0
5404 && reg_equiv_constant[regno] != 0)
5405 op0 = reg_equiv_constant[regno];
5406 else if (REG_P (op1)
5407 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5408 && reg_renumber[regno] < 0
5409 && reg_equiv_constant[regno] != 0)
5410 op1 = reg_equiv_constant[regno];
5411 else if (GET_CODE (op0) == PLUS
5412 && (tem = subst_indexed_address (op0)) != op0)
5413 op0 = tem;
5414 else if (GET_CODE (op1) == PLUS
5415 && (tem = subst_indexed_address (op1)) != op1)
5416 op1 = tem;
5417 else
5418 return addr;
5419
5420 /* Pick out up to three things to add. */
5421 if (GET_CODE (op1) == PLUS)
5422 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5423 else if (GET_CODE (op0) == PLUS)
5424 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5425
5426 /* Compute the sum. */
5427 if (op2 != 0)
5428 op1 = form_sum (op1, op2);
5429 if (op1 != 0)
5430 op0 = form_sum (op0, op1);
5431
5432 return op0;
5433 }
5434 return addr;
5435 }
5436 \f
5437 /* Update the REG_INC notes for an insn. It updates all REG_INC
5438 notes for the instruction which refer to REGNO so that they
5439 refer to the reload number.
5440
5441 INSN is the insn for which any REG_INC notes need updating.
5442
5443 REGNO is the register number which has been reloaded.
5444
5445 RELOADNUM is the reload number. */
5446
5447 static void
5448 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5449 int reloadnum ATTRIBUTE_UNUSED)
5450 {
5451 #ifdef AUTO_INC_DEC
5452 rtx link;
5453
5454 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5455 if (REG_NOTE_KIND (link) == REG_INC
5456 && (int) REGNO (XEXP (link, 0)) == regno)
5457 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5458 #endif
5459 }
5460 \f
5461 /* Record the pseudo registers we must reload into hard registers in a
5462 subexpression of a would-be memory address, X, referring to a value
5463 in mode MODE. (This function is not called if the address we find
5464 is strictly valid.)
5465
5466 CONTEXT = 1 means we are considering regs as index regs,
5467 = 0 means we are considering them as base regs.
5468 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5469 or an autoinc code.
5470 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5471 is the code of the index part of the address. Otherwise, pass SCRATCH
5472 for this argument.
5473 OPNUM and TYPE specify the purpose of any reloads made.
5474
5475 IND_LEVELS says how many levels of indirect addressing are
5476 supported at this point in the address.
5477
5478 INSN, if nonzero, is the insn in which we do the reload. It is used
5479 to determine if we may generate output reloads.
5480
5481 We return nonzero if X, as a whole, is reloaded or replaced. */
5482
5483 /* Note that we take shortcuts assuming that no multi-reg machine mode
5484 occurs as part of an address.
5485 Also, this is not fully machine-customizable; it works for machines
5486 such as VAXen and 68000's and 32000's, but other possible machines
5487 could have addressing modes that this does not handle right.
5488 If you add push_reload calls here, you need to make sure gen_reload
5489 handles those cases gracefully. */
5490
5491 static int
5492 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5493 enum rtx_code outer_code, enum rtx_code index_code,
5494 rtx *loc, int opnum, enum reload_type type,
5495 int ind_levels, rtx insn)
5496 {
5497 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5498 ((CONTEXT) == 0 \
5499 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5500 : REGNO_OK_FOR_INDEX_P (REGNO))
5501
5502 enum reg_class context_reg_class;
5503 RTX_CODE code = GET_CODE (x);
5504
5505 if (context == 1)
5506 context_reg_class = INDEX_REG_CLASS;
5507 else
5508 context_reg_class = base_reg_class (mode, outer_code, index_code);
5509
5510 switch (code)
5511 {
5512 case PLUS:
5513 {
5514 rtx orig_op0 = XEXP (x, 0);
5515 rtx orig_op1 = XEXP (x, 1);
5516 RTX_CODE code0 = GET_CODE (orig_op0);
5517 RTX_CODE code1 = GET_CODE (orig_op1);
5518 rtx op0 = orig_op0;
5519 rtx op1 = orig_op1;
5520
5521 if (GET_CODE (op0) == SUBREG)
5522 {
5523 op0 = SUBREG_REG (op0);
5524 code0 = GET_CODE (op0);
5525 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5526 op0 = gen_rtx_REG (word_mode,
5527 (REGNO (op0) +
5528 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5529 GET_MODE (SUBREG_REG (orig_op0)),
5530 SUBREG_BYTE (orig_op0),
5531 GET_MODE (orig_op0))));
5532 }
5533
5534 if (GET_CODE (op1) == SUBREG)
5535 {
5536 op1 = SUBREG_REG (op1);
5537 code1 = GET_CODE (op1);
5538 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5539 /* ??? Why is this given op1's mode, while for op0 SUBREGs
5540 ??? above we use word_mode? */
5541 op1 = gen_rtx_REG (GET_MODE (op1),
5542 (REGNO (op1) +
5543 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5544 GET_MODE (SUBREG_REG (orig_op1)),
5545 SUBREG_BYTE (orig_op1),
5546 GET_MODE (orig_op1))));
5547 }
5548 /* A PLUS in the index register may be created only as a result of
5549 register rematerialization for expressions like &localvar*4. Reload it.
5550 It may be possible to combine the displacement on the outer level,
5551 but it is probably not worthwhile to do so. */
5552 if (context == 1)
5553 {
5554 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5555 opnum, ADDR_TYPE (type), ind_levels, insn);
5556 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5557 context_reg_class,
5558 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5559 return 1;
5560 }
5561
5562 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5563 || code0 == ZERO_EXTEND || code1 == MEM)
5564 {
5565 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5566 &XEXP (x, 0), opnum, type, ind_levels,
5567 insn);
5568 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5569 &XEXP (x, 1), opnum, type, ind_levels,
5570 insn);
5571 }
5572
5573 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5574 || code1 == ZERO_EXTEND || code0 == MEM)
5575 {
5576 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5577 &XEXP (x, 0), opnum, type, ind_levels,
5578 insn);
5579 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5580 &XEXP (x, 1), opnum, type, ind_levels,
5581 insn);
5582 }
5583
5584 else if (code0 == CONST_INT || code0 == CONST
5585 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5586 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5587 &XEXP (x, 1), opnum, type, ind_levels,
5588 insn);
5589
5590 else if (code1 == CONST_INT || code1 == CONST
5591 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5592 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5593 &XEXP (x, 0), opnum, type, ind_levels,
5594 insn);
5595
5596 else if (code0 == REG && code1 == REG)
5597 {
5598 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5599 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5600 return 0;
5601 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5602 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5603 return 0;
5604 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5605 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5606 &XEXP (x, 1), opnum, type, ind_levels,
5607 insn);
5608 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5609 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5613 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5614 &XEXP (x, 0), opnum, type, ind_levels,
5615 insn);
5616 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5617 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5618 &XEXP (x, 1), opnum, type, ind_levels,
5619 insn);
5620 else
5621 {
5622 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5623 &XEXP (x, 0), opnum, type, ind_levels,
5624 insn);
5625 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5626 &XEXP (x, 1), opnum, type, ind_levels,
5627 insn);
5628 }
5629 }
5630
5631 else if (code0 == REG)
5632 {
5633 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5634 &XEXP (x, 0), opnum, type, ind_levels,
5635 insn);
5636 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5637 &XEXP (x, 1), opnum, type, ind_levels,
5638 insn);
5639 }
5640
5641 else if (code1 == REG)
5642 {
5643 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5644 &XEXP (x, 1), opnum, type, ind_levels,
5645 insn);
5646 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5647 &XEXP (x, 0), opnum, type, ind_levels,
5648 insn);
5649 }
5650 }
5651
5652 return 0;
5653
5654 case POST_MODIFY:
5655 case PRE_MODIFY:
5656 {
5657 rtx op0 = XEXP (x, 0);
5658 rtx op1 = XEXP (x, 1);
5659 enum rtx_code index_code;
5660 int regno;
5661 int reloadnum;
5662
5663 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5664 return 0;
5665
5666 /* Currently, we only support {PRE,POST}_MODIFY constructs
5667 where a base register is {inc,dec}remented by the contents
5668 of another register or by a constant value. Thus, these
5669 operands must match. */
5670 gcc_assert (op0 == XEXP (op1, 0));
5671
5672 /* Require index register (or constant). Let's just handle the
5673 register case in the meantime... If the target allows
5674 auto-modify by a constant then we could try replacing a pseudo
5675 register with its equivalent constant where applicable.
5676
5677 We also handle the case where the register was eliminated
5678 resulting in a PLUS subexpression.
5679
5680 If we later decide to reload the whole PRE_MODIFY or
5681 POST_MODIFY, inc_for_reload might clobber the reload register
5682 before reading the index. The index register might therefore
5683 need to live longer than a TYPE reload normally would, so be
5684 conservative and class it as RELOAD_OTHER. */
5685 if ((REG_P (XEXP (op1, 1))
5686 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5687 || GET_CODE (XEXP (op1, 1)) == PLUS)
5688 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5689 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5690 ind_levels, insn);
5691
5692 gcc_assert (REG_P (XEXP (op1, 0)));
5693
5694 regno = REGNO (XEXP (op1, 0));
5695 index_code = GET_CODE (XEXP (op1, 1));
5696
5697 /* A register that is incremented cannot be constant! */
5698 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5699 || reg_equiv_constant[regno] == 0);
5700
5701 /* Handle a register that is equivalent to a memory location
5702 which cannot be addressed directly. */
5703 if (reg_equiv_memory_loc[regno] != 0
5704 && (reg_equiv_address[regno] != 0
5705 || num_not_at_initial_offset))
5706 {
5707 rtx tem = make_memloc (XEXP (x, 0), regno);
5708
5709 if (reg_equiv_address[regno]
5710 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5711 {
5712 rtx orig = tem;
5713
5714 /* First reload the memory location's address.
5715 We can't use ADDR_TYPE (type) here, because we need to
5716 write back the value after reading it, hence we actually
5717 need two registers. */
5718 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5719 &XEXP (tem, 0), opnum,
5720 RELOAD_OTHER,
5721 ind_levels, insn);
5722
5723 if (!rtx_equal_p (tem, orig))
5724 push_reg_equiv_alt_mem (regno, tem);
5725
5726 /* Then reload the memory location into a base
5727 register. */
5728 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5729 &XEXP (op1, 0),
5730 base_reg_class (mode, code,
5731 index_code),
5732 GET_MODE (x), GET_MODE (x), 0,
5733 0, opnum, RELOAD_OTHER);
5734
5735 update_auto_inc_notes (this_insn, regno, reloadnum);
5736 return 0;
5737 }
5738 }
5739
5740 if (reg_renumber[regno] >= 0)
5741 regno = reg_renumber[regno];
5742
5743 /* We require a base register here... */
5744 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5745 {
5746 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5747 &XEXP (op1, 0), &XEXP (x, 0),
5748 base_reg_class (mode, code, index_code),
5749 GET_MODE (x), GET_MODE (x), 0, 0,
5750 opnum, RELOAD_OTHER);
5751
5752 update_auto_inc_notes (this_insn, regno, reloadnum);
5753 return 0;
5754 }
5755 }
5756 return 0;
5757
5758 case POST_INC:
5759 case POST_DEC:
5760 case PRE_INC:
5761 case PRE_DEC:
5762 if (REG_P (XEXP (x, 0)))
5763 {
5764 int regno = REGNO (XEXP (x, 0));
5765 int value = 0;
5766 rtx x_orig = x;
5767
5768 /* A register that is incremented cannot be constant! */
5769 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5770 || reg_equiv_constant[regno] == 0);
5771
5772 /* Handle a register that is equivalent to a memory location
5773 which cannot be addressed directly. */
5774 if (reg_equiv_memory_loc[regno] != 0
5775 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5776 {
5777 rtx tem = make_memloc (XEXP (x, 0), regno);
5778 if (reg_equiv_address[regno]
5779 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5780 {
5781 rtx orig = tem;
5782
5783 /* First reload the memory location's address.
5784 We can't use ADDR_TYPE (type) here, because we need to
5785 write back the value after reading it, hence we actually
5786 need two registers. */
5787 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5788 &XEXP (tem, 0), opnum, type,
5789 ind_levels, insn);
5790 if (!rtx_equal_p (tem, orig))
5791 push_reg_equiv_alt_mem (regno, tem);
5792 /* Put this inside a new increment-expression. */
5793 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5794 /* Proceed to reload that, as if it contained a register. */
5795 }
5796 }
5797
5798 /* If we have a hard register that is ok in this incdec context,
5799 don't make a reload. If the register isn't nice enough for
5800 autoincdec, we can reload it. But if an autoincrement of a
5801 register that we have verified here as acceptable is still not
5802 "valid" in the enclosing context, it must be that no autoincrement
5803 is "valid" at all. If that is so and something made an
5804 autoincrement anyway, this must be a special context where one is allowed.
5805 (For example, a "push" instruction.)
5806 We can't improve this address, so leave it alone. */
5807
5808 /* Otherwise, reload the autoincrement into a suitable hard reg
5809 and record how much to increment by. */
5810
5811 if (reg_renumber[regno] >= 0)
5812 regno = reg_renumber[regno];
5813 if (regno >= FIRST_PSEUDO_REGISTER
5814 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5815 index_code))
5816 {
5817 int reloadnum;
5818
5819 /* If we can output the register afterwards, do so, this
5820 saves the extra update.
5821 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5822 CALL_INSN - and it does not set CC0.
5823 But don't do this if we cannot directly address the
5824 memory location, since this will make it harder to
5825 reuse address reloads, and increases register pressure.
5826 Also don't do this if we can probably update x directly. */
5827 rtx equiv = (MEM_P (XEXP (x, 0))
5828 ? XEXP (x, 0)
5829 : reg_equiv_mem[regno]);
5830 int icode = (int) optab_handler (add_optab, Pmode)->insn_code;
5831 if (insn && NONJUMP_INSN_P (insn) && equiv
5832 && memory_operand (equiv, GET_MODE (equiv))
5833 #ifdef HAVE_cc0
5834 && ! sets_cc0_p (PATTERN (insn))
5835 #endif
5836 && ! (icode != CODE_FOR_nothing
5837 && ((*insn_data[icode].operand[0].predicate)
5838 (equiv, Pmode))
5839 && ((*insn_data[icode].operand[1].predicate)
5840 (equiv, Pmode))))
5841 {
5842 /* We use the original pseudo for loc, so that
5843 emit_reload_insns() knows which pseudo this
5844 reload refers to and updates the pseudo rtx, not
5845 its equivalent memory location, as well as the
5846 corresponding entry in reg_last_reload_reg. */
5847 loc = &XEXP (x_orig, 0);
5848 x = XEXP (x, 0);
5849 reloadnum
5850 = push_reload (x, x, loc, loc,
5851 context_reg_class,
5852 GET_MODE (x), GET_MODE (x), 0, 0,
5853 opnum, RELOAD_OTHER);
5854 }
5855 else
5856 {
5857 reloadnum
5858 = push_reload (x, x, loc, (rtx*) 0,
5859 context_reg_class,
5860 GET_MODE (x), GET_MODE (x), 0, 0,
5861 opnum, type);
5862 rld[reloadnum].inc
5863 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5864
5865 value = 1;
5866 }
5867
5868 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5869 reloadnum);
5870 }
5871 return value;
5872 }
5873 return 0;
5874
5875 case TRUNCATE:
5876 case SIGN_EXTEND:
5877 case ZERO_EXTEND:
5878 /* Look for parts to reload in the inner expression and reload them
5879 too, in addition to this operation. Reloading all inner parts in
5880 addition to this one shouldn't be necessary, but at this point,
5881 we don't know if we can possibly omit any part that *can* be
5882 reloaded. Targets that are better off reloading just either part
5883 (or perhaps even a different part of an outer expression), should
5884 define LEGITIMIZE_RELOAD_ADDRESS. */
5885 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5886 context, code, SCRATCH, &XEXP (x, 0), opnum,
5887 type, ind_levels, insn);
5888 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5889 context_reg_class,
5890 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5891 return 1;
5892
5893 case MEM:
5894 /* This is probably the result of a substitution, by eliminate_regs, of
5895 an equivalent address for a pseudo that was not allocated to a hard
5896 register. Verify that the specified address is valid and reload it
5897 into a register.
5898
5899 Since we know we are going to reload this item, don't decrement for
5900 the indirection level.
5901
5902 Note that this is actually conservative: it would be slightly more
5903 efficient to use the value of SPILL_INDIRECT_LEVELS from
5904 reload1.c here. */
5905
5906 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5907 opnum, ADDR_TYPE (type), ind_levels, insn);
5908 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5909 context_reg_class,
5910 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5911 return 1;
5912
5913 case REG:
5914 {
5915 int regno = REGNO (x);
5916
5917 if (reg_equiv_constant[regno] != 0)
5918 {
5919 find_reloads_address_part (reg_equiv_constant[regno], loc,
5920 context_reg_class,
5921 GET_MODE (x), opnum, type, ind_levels);
5922 return 1;
5923 }
5924
5925 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5926 that feeds this insn. */
5927 if (reg_equiv_mem[regno] != 0)
5928 {
5929 push_reload (reg_equiv_mem[regno], NULL_RTX, loc, (rtx*) 0,
5930 context_reg_class,
5931 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5932 return 1;
5933 }
5934 #endif
5935
5936 if (reg_equiv_memory_loc[regno]
5937 && (reg_equiv_address[regno] != 0 || num_not_at_initial_offset))
5938 {
5939 rtx tem = make_memloc (x, regno);
5940 if (reg_equiv_address[regno] != 0
5941 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
5942 {
5943 x = tem;
5944 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5945 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5946 ind_levels, insn);
5947 if (!rtx_equal_p (x, tem))
5948 push_reg_equiv_alt_mem (regno, x);
5949 }
5950 }
5951
5952 if (reg_renumber[regno] >= 0)
5953 regno = reg_renumber[regno];
5954
5955 if (regno >= FIRST_PSEUDO_REGISTER
5956 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5957 index_code))
5958 {
5959 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5960 context_reg_class,
5961 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5962 return 1;
5963 }
5964
5965 /* If a register appearing in an address is the subject of a CLOBBER
5966 in this insn, reload it into some other register to be safe.
5967 The CLOBBER is supposed to make the register unavailable
5968 from before this insn to after it. */
5969 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5970 {
5971 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5972 context_reg_class,
5973 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5974 return 1;
5975 }
5976 }
5977 return 0;
5978
5979 case SUBREG:
5980 if (REG_P (SUBREG_REG (x)))
5981 {
5982 /* If this is a SUBREG of a hard register and the resulting register
5983 is of the wrong class, reload the whole SUBREG. This avoids
5984 needless copies if SUBREG_REG is multi-word. */
5985 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5986 {
5987 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5988
5989 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5990 index_code))
5991 {
5992 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5993 context_reg_class,
5994 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5995 return 1;
5996 }
5997 }
5998 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5999 needs more registers than the class contains, then reload the whole SUBREG. */
6000 else
6001 {
6002 enum reg_class rclass = context_reg_class;
6003 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
6004 > reg_class_size[rclass])
6005 {
6006 x = find_reloads_subreg_address (x, 0, opnum,
6007 ADDR_TYPE (type),
6008 ind_levels, insn);
6009 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6010 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6011 return 1;
6012 }
6013 }
6014 }
6015 break;
6016
6017 default:
6018 break;
6019 }
6020
6021 {
6022 const char *fmt = GET_RTX_FORMAT (code);
6023 int i;
6024
6025 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6026 {
6027 if (fmt[i] == 'e')
6028 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6029 we get here. */
6030 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6031 &XEXP (x, i), opnum, type, ind_levels, insn);
6032 }
6033 }
6034
6035 #undef REG_OK_FOR_CONTEXT
6036 return 0;
6037 }
6038 \f
6039 /* X, which is found at *LOC, is a part of an address that needs to be
6040 reloaded into a register of class RCLASS. If X is a constant, or if
6041 X is a PLUS that contains a constant, check that the constant is a
6042 legitimate operand and that we are supposed to be able to load
6043 it into the register.
6044
6045 If not, force the constant into memory and reload the MEM instead.
6046
6047 MODE is the mode to use, in case X is an integer constant.
6048
6049 OPNUM and TYPE describe the purpose of any reloads made.
6050
6051 IND_LEVELS says how many levels of indirect addressing this machine
6052 supports. */
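/* Illustrative note (added): for instance, if X is a CONST_INT the target
   cannot use as an immediate operand, force_const_mem pushes it into the
   constant pool and the resulting MEM, with its address reloaded if
   necessary, is what gets reloaded into RCLASS.  */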
6053
6054 static void
6055 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6056 enum machine_mode mode, int opnum,
6057 enum reload_type type, int ind_levels)
6058 {
6059 if (CONSTANT_P (x)
6060 && (! LEGITIMATE_CONSTANT_P (x)
6061 || PREFERRED_RELOAD_CLASS (x, rclass) == NO_REGS))
6062 {
6063 x = force_const_mem (mode, x);
6064 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6065 opnum, type, ind_levels, 0);
6066 }
6067
6068 else if (GET_CODE (x) == PLUS
6069 && CONSTANT_P (XEXP (x, 1))
6070 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6071 || PREFERRED_RELOAD_CLASS (XEXP (x, 1), rclass) == NO_REGS))
6072 {
6073 rtx tem;
6074
6075 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6076 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6077 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6078 opnum, type, ind_levels, 0);
6079 }
6080
6081 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6082 mode, VOIDmode, 0, 0, opnum, type);
6083 }
6084 \f
6085 /* X, a subreg of a pseudo, is a part of an address that needs to be
6086 reloaded.
6087
6088 If the pseudo is equivalent to a memory location that cannot be directly
6089 addressed, make the necessary address reloads.
6090
6091 If address reloads have been necessary, or if the address is changed
6092 by register elimination, return the rtx of the memory location;
6093 otherwise, return X.
6094
6095 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6096 memory location.
6097
6098 OPNUM and TYPE identify the purpose of the reload.
6099
6100 IND_LEVELS says how many levels of indirect addressing are
6101 supported at this point in the address.
6102
6103 INSN, if nonzero, is the insn in which we do the reload. It is used
6104 to determine where to put USEs for pseudos that we have to replace with
6105 stack slots. */
6106
6107 static rtx
6108 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6109 enum reload_type type, int ind_levels, rtx insn)
6110 {
6111 int regno = REGNO (SUBREG_REG (x));
6112
6113 if (reg_equiv_memory_loc[regno])
6114 {
6115 /* If the address is not directly addressable, or if the address is not
6116 offsettable, then it must be replaced. */
6117 if (! force_replace
6118 && (reg_equiv_address[regno]
6119 || ! offsettable_memref_p (reg_equiv_mem[regno])))
6120 force_replace = 1;
6121
6122 if (force_replace || num_not_at_initial_offset)
6123 {
6124 rtx tem = make_memloc (SUBREG_REG (x), regno);
6125
6126 /* If the address changes because of register elimination, then
6127 it must be replaced. */
6128 if (force_replace
6129 || ! rtx_equal_p (tem, reg_equiv_mem[regno]))
6130 {
6131 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6132 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6133 int offset;
6134 rtx orig = tem;
6135 int reloaded;
6136
6137 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6138 hold the correct (negative) byte offset. */
6139 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6140 offset = inner_size - outer_size;
6141 else
6142 offset = SUBREG_BYTE (x);
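/* Illustrative note (added): on a big-endian target, a paradoxical
   (subreg:DI (reg:SI ...) 0) has inner_size 4 and outer_size 8 (assuming
   4-byte SImode and 8-byte DImode), so OFFSET becomes 4 - 8 = -4 and the
   widened memory reference starts 4 bytes before the original slot.  */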
6143
6144 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6145 PUT_MODE (tem, GET_MODE (x));
6146 if (MEM_OFFSET (tem))
6147 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6148
6149 /* If this was a paradoxical subreg that we replaced, the
6150 resulting memory must be sufficiently aligned to allow
6151 us to widen the mode of the memory. */
6152 if (outer_size > inner_size)
6153 {
6154 rtx base;
6155
6156 base = XEXP (tem, 0);
6157 if (GET_CODE (base) == PLUS)
6158 {
6159 if (CONST_INT_P (XEXP (base, 1))
6160 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6161 return x;
6162 base = XEXP (base, 0);
6163 }
6164 if (!REG_P (base)
6165 || (REGNO_POINTER_ALIGN (REGNO (base))
6166 < outer_size * BITS_PER_UNIT))
6167 return x;
6168 }
6169
6170 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6171 XEXP (tem, 0), &XEXP (tem, 0),
6172 opnum, type, ind_levels, insn);
6173 /* ??? Do we need to handle nonzero offsets somehow? */
6174 if (!offset && !rtx_equal_p (tem, orig))
6175 push_reg_equiv_alt_mem (regno, tem);
6176
6177 /* For some processors an address may be valid in the
6178 original mode but not in a smaller mode. For
6179 example, ARM accepts a scaled index register in
6180 SImode but not in HImode. Note that this is only
6181 a problem if the address in reg_equiv_mem is already
6182 invalid in the new mode; other cases would be fixed
6183 by find_reloads_address as usual.
6184
6185 ??? We attempt to handle such cases here by doing an
6186 additional reload of the full address after the
6187 usual processing by find_reloads_address. Note that
6188 this may not work in the general case, but it seems
6189 to cover the cases where this situation currently
6190 occurs. A more general fix might be to reload the
6191 *value* instead of the address, but this would not
6192 be expected by the callers of this routine as-is.
6193
6194 If find_reloads_address has already completely replaced
6195 the address, there is nothing further to do. */
6196 if (reloaded == 0
6197 && reg_equiv_mem[regno] != 0
6198 && !strict_memory_address_addr_space_p
6199 (GET_MODE (x), XEXP (reg_equiv_mem[regno], 0),
6200 MEM_ADDR_SPACE (reg_equiv_mem[regno])))
6201 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6202 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6203 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6204 opnum, type);
6205
6206 /* If this is not a toplevel operand, find_reloads doesn't see
6207 this substitution. We have to emit a USE of the pseudo so
6208 that delete_output_reload can see it. */
6209 if (replace_reloads && recog_data.operand[opnum] != x)
6210 /* We mark the USE with QImode so that we recognize it
6211 as one that can be safely deleted at the end of
6212 reload. */
6213 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6214 SUBREG_REG (x)),
6215 insn), QImode);
6216 x = tem;
6217 }
6218 }
6219 }
6220 return x;
6221 }
6222 \f
6223 /* Substitute into the current INSN the registers into which we have reloaded
6224 the things that need reloading. The array `replacements'
6225 contains the locations of all pointers that must be changed
6226 and says what to replace them with.
6227
6228 All replacements are applied in place within INSN. */
6229
6230 void
6231 subst_reloads (rtx insn)
6232 {
6233 int i;
6234
6235 for (i = 0; i < n_replacements; i++)
6236 {
6237 struct replacement *r = &replacements[i];
6238 rtx reloadreg = rld[r->what].reg_rtx;
6239 if (reloadreg)
6240 {
6241 #ifdef DEBUG_RELOAD
6242 /* This checking takes a very long time on some platforms
6243 causing the gcc.c-torture/compile/limits-fnargs.c test
6244 to time out during testing. See PR 31850.
6245
6246 Internal consistency test. Check that we don't modify
6247 anything in the equivalence arrays. Whenever something from
6248 those arrays needs to be reloaded, it must be unshared before
6249 being substituted into; the equivalence must not be modified.
6250 Otherwise, if the equivalence is used after that, it will
6251 have been modified, and the thing substituted (probably a
6252 register) is likely overwritten and not a usable equivalence. */
6253 int check_regno;
6254
6255 for (check_regno = 0; check_regno < max_regno; check_regno++)
6256 {
6257 #define CHECK_MODF(ARRAY) \
6258 gcc_assert (!ARRAY[check_regno] \
6259 || !loc_mentioned_in_p (r->where, \
6260 ARRAY[check_regno]))
6261
6262 CHECK_MODF (reg_equiv_constant);
6263 CHECK_MODF (reg_equiv_memory_loc);
6264 CHECK_MODF (reg_equiv_address);
6265 CHECK_MODF (reg_equiv_mem);
6266 #undef CHECK_MODF
6267 }
6268 #endif /* DEBUG_RELOAD */
6269
6270 /* If we're replacing a LABEL_REF with a register, there must
6271 already be an indication (to e.g. flow) which label this
6272 register refers to. */
6273 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6274 || !JUMP_P (insn)
6275 || find_reg_note (insn,
6276 REG_LABEL_OPERAND,
6277 XEXP (*r->where, 0))
6278 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6279
6280 /* Encapsulate RELOADREG so its machine mode matches what
6281 used to be there. Note that gen_lowpart_common will
6282 do the wrong thing if RELOADREG is multi-word. RELOADREG
6283 will always be a REG here. */
6284 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6285 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6286
6287 /* If we are putting this into a SUBREG and RELOADREG is a
6288 SUBREG, we would be making nested SUBREGs, so we have to fix
6289 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6290
6291 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6292 {
6293 if (GET_MODE (*r->subreg_loc)
6294 == GET_MODE (SUBREG_REG (reloadreg)))
6295 *r->subreg_loc = SUBREG_REG (reloadreg);
6296 else
6297 {
6298 int final_offset =
6299 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6300
6301 /* When working with SUBREGs the rule is that the byte
6302 offset must be a multiple of the size of the SUBREG's mode. */
6303 final_offset = (final_offset /
6304 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6305 final_offset = (final_offset *
6306 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6307
6308 *r->where = SUBREG_REG (reloadreg);
6309 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6310 }
6311 }
6312 else
6313 *r->where = reloadreg;
6314 }
6315 /* If reload got no reg and isn't optional, something's wrong. */
6316 else
6317 gcc_assert (rld[r->what].optional);
6318 }
6319 }
6320 \f
6321 /* Make a copy of any replacements being done into X and move those
6322 copies to locations in Y, a copy of X. */
6323
6324 void
6325 copy_replacements (rtx x, rtx y)
6326 {
6327 /* We can't support X being a SUBREG because we might then need to know its
6328 location if something inside it was replaced. */
6329 gcc_assert (GET_CODE (x) != SUBREG);
6330
6331 copy_replacements_1 (&x, &y, n_replacements);
6332 }
6333
6334 static void
6335 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6336 {
6337 int i, j;
6338 rtx x, y;
6339 struct replacement *r;
6340 enum rtx_code code;
6341 const char *fmt;
6342
6343 for (j = 0; j < orig_replacements; j++)
6344 {
6345 if (replacements[j].subreg_loc == px)
6346 {
6347 r = &replacements[n_replacements++];
6348 r->where = replacements[j].where;
6349 r->subreg_loc = py;
6350 r->what = replacements[j].what;
6351 r->mode = replacements[j].mode;
6352 }
6353 else if (replacements[j].where == px)
6354 {
6355 r = &replacements[n_replacements++];
6356 r->where = py;
6357 r->subreg_loc = 0;
6358 r->what = replacements[j].what;
6359 r->mode = replacements[j].mode;
6360 }
6361 }
6362
6363 x = *px;
6364 y = *py;
6365 code = GET_CODE (x);
6366 fmt = GET_RTX_FORMAT (code);
6367
6368 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6369 {
6370 if (fmt[i] == 'e')
6371 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6372 else if (fmt[i] == 'E')
6373 for (j = XVECLEN (x, i); --j >= 0; )
6374 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6375 orig_replacements);
6376 }
6377 }
6378
6379 /* Change any replacements being done to *X to be done to *Y. */
6380
6381 void
6382 move_replacements (rtx *x, rtx *y)
6383 {
6384 int i;
6385
6386 for (i = 0; i < n_replacements; i++)
6387 if (replacements[i].subreg_loc == x)
6388 replacements[i].subreg_loc = y;
6389 else if (replacements[i].where == x)
6390 {
6391 replacements[i].where = y;
6392 replacements[i].subreg_loc = 0;
6393 }
6394 }
6395 \f
6396 /* If LOC was scheduled to be replaced by something, return the replacement.
6397 Otherwise, return *LOC. */
6398
6399 rtx
6400 find_replacement (rtx *loc)
6401 {
6402 struct replacement *r;
6403
6404 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6405 {
6406 rtx reloadreg = rld[r->what].reg_rtx;
6407
6408 if (reloadreg && r->where == loc)
6409 {
6410 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6411 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6412
6413 return reloadreg;
6414 }
6415 else if (reloadreg && r->subreg_loc == loc)
6416 {
6417 /* RELOADREG must be either a REG or a SUBREG.
6418
6419 ??? Is it actually still ever a SUBREG? If so, why? */
6420
6421 if (REG_P (reloadreg))
6422 return gen_rtx_REG (GET_MODE (*loc),
6423 (REGNO (reloadreg) +
6424 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6425 GET_MODE (SUBREG_REG (*loc)),
6426 SUBREG_BYTE (*loc),
6427 GET_MODE (*loc))));
6428 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6429 return reloadreg;
6430 else
6431 {
6432 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6433
6434 /* When working with SUBREGs the rule is that the byte
6435 offset must be a multiple of the size of the SUBREG's mode. */
6436 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6437 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6438 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6439 final_offset);
6440 }
6441 }
6442 }
6443
6444 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6445 what's inside and make a new rtl if so. */
6446 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6447 || GET_CODE (*loc) == MULT)
6448 {
6449 rtx x = find_replacement (&XEXP (*loc, 0));
6450 rtx y = find_replacement (&XEXP (*loc, 1));
6451
6452 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6453 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6454 }
6455
6456 return *loc;
6457 }
6458 \f
6459 /* Return nonzero if register in range [REGNO, ENDREGNO)
6460 appears either explicitly or implicitly in X
6461 other than being stored into (except for earlyclobber operands).
6462
6463 References contained within the substructure at LOC do not count.
6464 LOC may be zero, meaning don't ignore anything.
6465
6466 This is similar to refers_to_regno_p in rtlanal.c except that we
6467 look at equivalences for pseudos that didn't get hard registers. */
6468
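/* For example, if pseudo 200 did not get a hard register and
   reg_equiv_memory_loc[200] is (mem (plus (reg 6) (const_int -4))),
   then refers_to_regno_for_reload_p (6, 7, regno_reg_rtx[200], 0)
   returns nonzero, because the equivalent memory address uses hard
   register 6 (the register numbers are illustrative).  */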
6469 static int
6470 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6471 rtx x, rtx *loc)
6472 {
6473 int i;
6474 unsigned int r;
6475 RTX_CODE code;
6476 const char *fmt;
6477
6478 if (x == 0)
6479 return 0;
6480
6481 repeat:
6482 code = GET_CODE (x);
6483
6484 switch (code)
6485 {
6486 case REG:
6487 r = REGNO (x);
6488
6489 /* If this is a pseudo, a hard register must not have been allocated.
6490 X must therefore either be a constant or be in memory. */
6491 if (r >= FIRST_PSEUDO_REGISTER)
6492 {
6493 if (reg_equiv_memory_loc[r])
6494 return refers_to_regno_for_reload_p (regno, endregno,
6495 reg_equiv_memory_loc[r],
6496 (rtx*) 0);
6497
6498 gcc_assert (reg_equiv_constant[r] || reg_equiv_invariant[r]);
6499 return 0;
6500 }
6501
6502 return (endregno > r
6503 && regno < r + (r < FIRST_PSEUDO_REGISTER
6504 ? hard_regno_nregs[r][GET_MODE (x)]
6505 : 1));
6506
6507 case SUBREG:
6508 /* If this is a SUBREG of a hard reg, we can see exactly which
6509 registers are being modified. Otherwise, handle normally. */
6510 if (REG_P (SUBREG_REG (x))
6511 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6512 {
6513 unsigned int inner_regno = subreg_regno (x);
6514 unsigned int inner_endregno
6515 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6516 ? subreg_nregs (x) : 1);
6517
6518 return endregno > inner_regno && regno < inner_endregno;
6519 }
6520 break;
6521
6522 case CLOBBER:
6523 case SET:
6524 if (&SET_DEST (x) != loc
6525 /* Note that setting a SUBREG counts as referring to the REG it is in
6526 for a pseudo, but not for hard registers, since we can
6527 treat each word individually. */
6528 && ((GET_CODE (SET_DEST (x)) == SUBREG
6529 && loc != &SUBREG_REG (SET_DEST (x))
6530 && REG_P (SUBREG_REG (SET_DEST (x)))
6531 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6532 && refers_to_regno_for_reload_p (regno, endregno,
6533 SUBREG_REG (SET_DEST (x)),
6534 loc))
6535 /* If the output is an earlyclobber operand, this is
6536 a conflict. */
6537 || ((!REG_P (SET_DEST (x))
6538 || earlyclobber_operand_p (SET_DEST (x)))
6539 && refers_to_regno_for_reload_p (regno, endregno,
6540 SET_DEST (x), loc))))
6541 return 1;
6542
6543 if (code == CLOBBER || loc == &SET_SRC (x))
6544 return 0;
6545 x = SET_SRC (x);
6546 goto repeat;
6547
6548 default:
6549 break;
6550 }
6551
6552 /* X does not match, so try its subexpressions. */
6553
6554 fmt = GET_RTX_FORMAT (code);
6555 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6556 {
6557 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6558 {
6559 if (i == 0)
6560 {
6561 x = XEXP (x, 0);
6562 goto repeat;
6563 }
6564 else
6565 if (refers_to_regno_for_reload_p (regno, endregno,
6566 XEXP (x, i), loc))
6567 return 1;
6568 }
6569 else if (fmt[i] == 'E')
6570 {
6571 int j;
6572 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6573 if (loc != &XVECEXP (x, i, j)
6574 && refers_to_regno_for_reload_p (regno, endregno,
6575 XVECEXP (x, i, j), loc))
6576 return 1;
6577 }
6578 }
6579 return 0;
6580 }
6581
6582 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6583 we check if any register number in X conflicts with the relevant register
6584 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6585 contains a MEM (we don't bother checking for memory addresses that can't
6586 conflict because we expect this to be a rare case).
6587
6588 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6589 that we look at equivalences for pseudos that didn't get hard registers. */
6590
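/* For example, modifying (reg:SI 1) affects
   (mem:SI (plus (reg:SI 1) (const_int 4))), so this returns 1 for that
   pair, while modifying a pseudo whose only equivalence is a constant
   affects nothing and yields 0 (the register numbers are illustrative).  */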
6591 int
6592 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6593 {
6594 int regno, endregno;
6595
6596 /* Overly conservative. */
6597 if (GET_CODE (x) == STRICT_LOW_PART
6598 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6599 x = XEXP (x, 0);
6600
6601 /* If either argument is a constant, then modifying X cannot affect IN. */
6602 if (CONSTANT_P (x) || CONSTANT_P (in))
6603 return 0;
6604 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6605 return refers_to_mem_for_reload_p (in);
6606 else if (GET_CODE (x) == SUBREG)
6607 {
6608 regno = REGNO (SUBREG_REG (x));
6609 if (regno < FIRST_PSEUDO_REGISTER)
6610 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6611 GET_MODE (SUBREG_REG (x)),
6612 SUBREG_BYTE (x),
6613 GET_MODE (x));
6614 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6615 ? subreg_nregs (x) : 1);
6616
6617 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6618 }
6619 else if (REG_P (x))
6620 {
6621 regno = REGNO (x);
6622
6623 /* If this is a pseudo, it must not have been assigned a hard register.
6624 Therefore, it must either be in memory or be a constant. */
6625
6626 if (regno >= FIRST_PSEUDO_REGISTER)
6627 {
6628 if (reg_equiv_memory_loc[regno])
6629 return refers_to_mem_for_reload_p (in);
6630 gcc_assert (reg_equiv_constant[regno]);
6631 return 0;
6632 }
6633
6634 endregno = END_HARD_REGNO (x);
6635
6636 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6637 }
6638 else if (MEM_P (x))
6639 return refers_to_mem_for_reload_p (in);
6640 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6641 || GET_CODE (x) == CC0)
6642 return reg_mentioned_p (x, in);
6643 else
6644 {
6645 gcc_assert (GET_CODE (x) == PLUS);
6646
6647 /* We actually want to know if X is mentioned somewhere inside IN.
6648 We must not say that (plus (sp) (const_int 124)) is in
6649 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6650 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6651 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6652 while (MEM_P (in))
6653 in = XEXP (in, 0);
6654 if (REG_P (in))
6655 return 0;
6656 else if (GET_CODE (in) == PLUS)
6657 return (rtx_equal_p (x, in)
6658 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6659 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6660 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6661 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6662 }
6663
6664 gcc_unreachable ();
6665 }
6666
6667 /* Return nonzero if anything in X contains a MEM.  Also look inside
6668 pseudo registers that are equivalent to memory. */
6669
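/* For example, (plus (reg 300) (const_int 4)) counts as containing a
   MEM here if pseudo 300 did not get a hard register and
   reg_equiv_memory_loc[300] is set, since the pseudo will be replaced
   by a memory reference (the register number is illustrative).  */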
6670 static int
6671 refers_to_mem_for_reload_p (rtx x)
6672 {
6673 const char *fmt;
6674 int i;
6675
6676 if (MEM_P (x))
6677 return 1;
6678
6679 if (REG_P (x))
6680 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6681 && reg_equiv_memory_loc[REGNO (x)]);
6682
6683 fmt = GET_RTX_FORMAT (GET_CODE (x));
6684 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6685 if (fmt[i] == 'e'
6686 && (MEM_P (XEXP (x, i))
6687 || refers_to_mem_for_reload_p (XEXP (x, i))))
6688 return 1;
6689
6690 return 0;
6691 }
6692 \f
6693 /* Check the insns before INSN to see if there is a suitable register
6694 containing the same value as GOAL.
6695 If OTHER is -1, look for a register in class RCLASS.
6696 Otherwise, just see if register number OTHER shares GOAL's value.
6697
6698 Return an rtx for the register found, or zero if none is found.
6699
6700 If RELOAD_REG_P is (short *)1,
6701 we reject any hard reg that appears in reload_reg_rtx
6702 because such a hard reg is also needed coming into this insn.
6703
6704 If RELOAD_REG_P is any other nonzero value,
6705 it is a vector indexed by hard reg number
6706 and we reject any hard reg whose element in the vector is nonnegative
6707 as well as any that appears in reload_reg_rtx.
6708
6709 If GOAL is zero, then GOALREG is a register number; we look
6710 for an equivalent for that register.
6711
6712 MODE is the machine mode of the value we want an equivalence for.
6713 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6714
6715 This function is used by jump.c as well as in the reload pass.
6716
6717 If GOAL is the sum of the stack pointer and a constant, we treat it
6718 as if it were a constant except that sp is required to be unchanging. */
6719
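/* A minimal query sketch (hypothetical arguments): to ask whether some
   hard register in GENERAL_REGS already holds the SImode value of
   pseudo 102 just before INSN, a caller could write

     rtx equiv = find_equiv_reg (regno_reg_rtx[102], insn, GENERAL_REGS,
				 -1, (short *) 0, -1, SImode);

   and use the result if it is nonzero; regno_reg_rtx[102] is assumed
   here to have mode SImode, as required above.  */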
6720 rtx
6721 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6722 short *reload_reg_p, int goalreg, enum machine_mode mode)
6723 {
6724 rtx p = insn;
6725 rtx goaltry, valtry, value, where;
6726 rtx pat;
6727 int regno = -1;
6728 int valueno;
6729 int goal_mem = 0;
6730 int goal_const = 0;
6731 int goal_mem_addr_varies = 0;
6732 int need_stable_sp = 0;
6733 int nregs;
6734 int valuenregs;
6735 int num = 0;
6736
6737 if (goal == 0)
6738 regno = goalreg;
6739 else if (REG_P (goal))
6740 regno = REGNO (goal);
6741 else if (MEM_P (goal))
6742 {
6743 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6744 if (MEM_VOLATILE_P (goal))
6745 return 0;
6746 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6747 return 0;
6748 /* An address with side effects must be reexecuted. */
6749 switch (code)
6750 {
6751 case POST_INC:
6752 case PRE_INC:
6753 case POST_DEC:
6754 case PRE_DEC:
6755 case POST_MODIFY:
6756 case PRE_MODIFY:
6757 return 0;
6758 default:
6759 break;
6760 }
6761 goal_mem = 1;
6762 }
6763 else if (CONSTANT_P (goal))
6764 goal_const = 1;
6765 else if (GET_CODE (goal) == PLUS
6766 && XEXP (goal, 0) == stack_pointer_rtx
6767 && CONSTANT_P (XEXP (goal, 1)))
6768 goal_const = need_stable_sp = 1;
6769 else if (GET_CODE (goal) == PLUS
6770 && XEXP (goal, 0) == frame_pointer_rtx
6771 && CONSTANT_P (XEXP (goal, 1)))
6772 goal_const = 1;
6773 else
6774 return 0;
6775
6776 num = 0;
6777 /* Scan insns back from INSN, looking for one that copies
6778 a value into or out of GOAL.
6779 Stop and give up if we reach a label. */
6780
6781 while (1)
6782 {
6783 p = PREV_INSN (p);
6784 if (p && DEBUG_INSN_P (p))
6785 continue;
6786 num++;
6787 if (p == 0 || LABEL_P (p)
6788 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6789 return 0;
6790
6791 if (NONJUMP_INSN_P (p)
6792 /* If we don't want spill regs ... */
6793 && (! (reload_reg_p != 0
6794 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6795 /* ... then ignore insns introduced by reload; they aren't
6796 useful and can cause results in reload_as_needed to be
6797 different from what they were when calculating the need for
6798 spills. If we notice an input-reload insn here, we will
6799 reject it below, but it might hide a usable equivalent.
6800 That makes bad code. It may even fail: perhaps no reg was
6801 spilled for this insn because it was assumed we would find
6802 that equivalent. */
6803 || INSN_UID (p) < reload_first_uid))
6804 {
6805 rtx tem;
6806 pat = single_set (p);
6807
6808 /* First check for something that sets some reg equal to GOAL. */
6809 if (pat != 0
6810 && ((regno >= 0
6811 && true_regnum (SET_SRC (pat)) == regno
6812 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6813 ||
6814 (regno >= 0
6815 && true_regnum (SET_DEST (pat)) == regno
6816 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6817 ||
6818 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6819 /* When looking for stack pointer + const,
6820 make sure we don't use a stack adjust. */
6821 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6822 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6823 || (goal_mem
6824 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6825 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6826 || (goal_mem
6827 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6828 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6829 /* If we are looking for a constant,
6830 and something equivalent to that constant was copied
6831 into a reg, we can use that reg. */
6832 || (goal_const && REG_NOTES (p) != 0
6833 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6834 && ((rtx_equal_p (XEXP (tem, 0), goal)
6835 && (valueno
6836 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6837 || (REG_P (SET_DEST (pat))
6838 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6839 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6840 && CONST_INT_P (goal)
6841 && 0 != (goaltry
6842 = operand_subword (XEXP (tem, 0), 0, 0,
6843 VOIDmode))
6844 && rtx_equal_p (goal, goaltry)
6845 && (valtry
6846 = operand_subword (SET_DEST (pat), 0, 0,
6847 VOIDmode))
6848 && (valueno = true_regnum (valtry)) >= 0)))
6849 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6850 NULL_RTX))
6851 && REG_P (SET_DEST (pat))
6852 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6853 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6854 && CONST_INT_P (goal)
6855 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6856 VOIDmode))
6857 && rtx_equal_p (goal, goaltry)
6858 && (valtry
6859 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6860 && (valueno = true_regnum (valtry)) >= 0)))
6861 {
6862 if (other >= 0)
6863 {
6864 if (valueno != other)
6865 continue;
6866 }
6867 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6868 continue;
6869 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6870 mode, valueno))
6871 continue;
6872 value = valtry;
6873 where = p;
6874 break;
6875 }
6876 }
6877 }
6878
6879 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6880 (or copying VALUE into GOAL, if GOAL is also a register).
6881 Now verify that VALUE is really valid. */
6882
6883 /* VALUENO is the register number of VALUE; a hard register. */
6884
6885 /* Don't try to re-use something that is killed in this insn. We want
6886 to be able to trust REG_UNUSED notes. */
6887 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6888 return 0;
6889
6890 /* If we propose to get the value from the stack pointer or if GOAL is
6891 a MEM based on the stack pointer, we need a stable SP. */
6892 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6893 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6894 goal)))
6895 need_stable_sp = 1;
6896
6897 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6898 if (GET_MODE (value) != mode)
6899 return 0;
6900
6901 /* Reject VALUE if it was loaded from GOAL
6902 and is also a register that appears in the address of GOAL. */
6903
6904 if (goal_mem && value == SET_DEST (single_set (where))
6905 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6906 goal, (rtx*) 0))
6907 return 0;
6908
6909 /* Reject registers that overlap GOAL. */
6910
6911 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6912 nregs = hard_regno_nregs[regno][mode];
6913 else
6914 nregs = 1;
6915 valuenregs = hard_regno_nregs[valueno][mode];
6916
6917 if (!goal_mem && !goal_const
6918 && regno + nregs > valueno && regno < valueno + valuenregs)
6919 return 0;
6920
6921 /* Reject VALUE if it is one of the regs reserved for reloads.
6922 Reload1 knows how to reuse them anyway, and it would get
6923 confused if we allocated one without its knowledge.
6924 (Now that insns introduced by reload are ignored above,
6925 this case shouldn't happen, but I'm not positive.) */
6926
6927 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6928 {
6929 int i;
6930 for (i = 0; i < valuenregs; ++i)
6931 if (reload_reg_p[valueno + i] >= 0)
6932 return 0;
6933 }
6934
6935 /* Reject VALUE if it is a register being used for an input reload
6936 even if it is not one of those reserved. */
6937
6938 if (reload_reg_p != 0)
6939 {
6940 int i;
6941 for (i = 0; i < n_reloads; i++)
6942 if (rld[i].reg_rtx != 0 && rld[i].in)
6943 {
6944 int regno1 = REGNO (rld[i].reg_rtx);
6945 int nregs1 = hard_regno_nregs[regno1]
6946 [GET_MODE (rld[i].reg_rtx)];
6947 if (regno1 < valueno + valuenregs
6948 && regno1 + nregs1 > valueno)
6949 return 0;
6950 }
6951 }
6952
6953 if (goal_mem)
6954 /* We must treat frame pointer as varying here,
6955 since it can vary--in a nonlocal goto as generated by expand_goto. */
6956 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6957
6958 /* Now verify that the values of GOAL and VALUE remain unaltered
6959 until INSN is reached. */
6960
6961 p = insn;
6962 while (1)
6963 {
6964 p = PREV_INSN (p);
6965 if (p == where)
6966 return value;
6967
6968 /* Don't trust the conversion past a function call
6969 if either of the two is in a call-clobbered register, or memory. */
6970 if (CALL_P (p))
6971 {
6972 int i;
6973
6974 if (goal_mem || need_stable_sp)
6975 return 0;
6976
6977 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6978 for (i = 0; i < nregs; ++i)
6979 if (call_used_regs[regno + i]
6980 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6981 return 0;
6982
6983 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6984 for (i = 0; i < valuenregs; ++i)
6985 if (call_used_regs[valueno + i]
6986 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6987 return 0;
6988 }
6989
6990 if (INSN_P (p))
6991 {
6992 pat = PATTERN (p);
6993
6994 /* Watch out for unspec_volatile, and volatile asms. */
6995 if (volatile_insn_p (pat))
6996 return 0;
6997
6998 /* If this insn P stores in either GOAL or VALUE, return 0.
6999 If GOAL is a memory ref and this insn writes memory, return 0.
7000 If GOAL is a memory ref and its address is not constant,
7001 and this insn P changes a register used in GOAL, return 0. */
7002
7003 if (GET_CODE (pat) == COND_EXEC)
7004 pat = COND_EXEC_CODE (pat);
7005 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7006 {
7007 rtx dest = SET_DEST (pat);
7008 while (GET_CODE (dest) == SUBREG
7009 || GET_CODE (dest) == ZERO_EXTRACT
7010 || GET_CODE (dest) == STRICT_LOW_PART)
7011 dest = XEXP (dest, 0);
7012 if (REG_P (dest))
7013 {
7014 int xregno = REGNO (dest);
7015 int xnregs;
7016 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7017 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7018 else
7019 xnregs = 1;
7020 if (xregno < regno + nregs && xregno + xnregs > regno)
7021 return 0;
7022 if (xregno < valueno + valuenregs
7023 && xregno + xnregs > valueno)
7024 return 0;
7025 if (goal_mem_addr_varies
7026 && reg_overlap_mentioned_for_reload_p (dest, goal))
7027 return 0;
7028 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7029 return 0;
7030 }
7031 else if (goal_mem && MEM_P (dest)
7032 && ! push_operand (dest, GET_MODE (dest)))
7033 return 0;
7034 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7035 && reg_equiv_memory_loc[regno] != 0)
7036 return 0;
7037 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7038 return 0;
7039 }
7040 else if (GET_CODE (pat) == PARALLEL)
7041 {
7042 int i;
7043 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7044 {
7045 rtx v1 = XVECEXP (pat, 0, i);
7046 if (GET_CODE (v1) == COND_EXEC)
7047 v1 = COND_EXEC_CODE (v1);
7048 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7049 {
7050 rtx dest = SET_DEST (v1);
7051 while (GET_CODE (dest) == SUBREG
7052 || GET_CODE (dest) == ZERO_EXTRACT
7053 || GET_CODE (dest) == STRICT_LOW_PART)
7054 dest = XEXP (dest, 0);
7055 if (REG_P (dest))
7056 {
7057 int xregno = REGNO (dest);
7058 int xnregs;
7059 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7060 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7061 else
7062 xnregs = 1;
7063 if (xregno < regno + nregs
7064 && xregno + xnregs > regno)
7065 return 0;
7066 if (xregno < valueno + valuenregs
7067 && xregno + xnregs > valueno)
7068 return 0;
7069 if (goal_mem_addr_varies
7070 && reg_overlap_mentioned_for_reload_p (dest,
7071 goal))
7072 return 0;
7073 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7074 return 0;
7075 }
7076 else if (goal_mem && MEM_P (dest)
7077 && ! push_operand (dest, GET_MODE (dest)))
7078 return 0;
7079 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7080 && reg_equiv_memory_loc[regno] != 0)
7081 return 0;
7082 else if (need_stable_sp
7083 && push_operand (dest, GET_MODE (dest)))
7084 return 0;
7085 }
7086 }
7087 }
7088
7089 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7090 {
7091 rtx link;
7092
7093 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7094 link = XEXP (link, 1))
7095 {
7096 pat = XEXP (link, 0);
7097 if (GET_CODE (pat) == CLOBBER)
7098 {
7099 rtx dest = SET_DEST (pat);
7100
7101 if (REG_P (dest))
7102 {
7103 int xregno = REGNO (dest);
7104 int xnregs
7105 = hard_regno_nregs[xregno][GET_MODE (dest)];
7106
7107 if (xregno < regno + nregs
7108 && xregno + xnregs > regno)
7109 return 0;
7110 else if (xregno < valueno + valuenregs
7111 && xregno + xnregs > valueno)
7112 return 0;
7113 else if (goal_mem_addr_varies
7114 && reg_overlap_mentioned_for_reload_p (dest,
7115 goal))
7116 return 0;
7117 }
7118
7119 else if (goal_mem && MEM_P (dest)
7120 && ! push_operand (dest, GET_MODE (dest)))
7121 return 0;
7122 else if (need_stable_sp
7123 && push_operand (dest, GET_MODE (dest)))
7124 return 0;
7125 }
7126 }
7127 }
7128
7129 #ifdef AUTO_INC_DEC
7130 /* If this insn auto-increments or auto-decrements
7131 either regno or valueno, return 0 now.
7132 If GOAL is a memory ref and its address is not constant,
7133 and this insn P increments a register used in GOAL, return 0. */
7134 {
7135 rtx link;
7136
7137 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7138 if (REG_NOTE_KIND (link) == REG_INC
7139 && REG_P (XEXP (link, 0)))
7140 {
7141 int incno = REGNO (XEXP (link, 0));
7142 if (incno < regno + nregs && incno >= regno)
7143 return 0;
7144 if (incno < valueno + valuenregs && incno >= valueno)
7145 return 0;
7146 if (goal_mem_addr_varies
7147 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7148 goal))
7149 return 0;
7150 }
7151 }
7152 #endif
7153 }
7154 }
7155 }
7156 \f
7157 /* Find a place where INCED appears in an increment or decrement operator
7158 within X, and return the amount INCED is incremented or decremented by.
7159 The value is always positive. */
7160
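/* For example, if X contains (mem:SI (post_inc:SI (reg 10))) and INCED
   is that (reg 10) rtx, the result is GET_MODE_SIZE (SImode), typically
   4 (sizes are target-dependent; the register number is illustrative).  */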
7161 static int
7162 find_inc_amount (rtx x, rtx inced)
7163 {
7164 enum rtx_code code = GET_CODE (x);
7165 const char *fmt;
7166 int i;
7167
7168 if (code == MEM)
7169 {
7170 rtx addr = XEXP (x, 0);
7171 if ((GET_CODE (addr) == PRE_DEC
7172 || GET_CODE (addr) == POST_DEC
7173 || GET_CODE (addr) == PRE_INC
7174 || GET_CODE (addr) == POST_INC)
7175 && XEXP (addr, 0) == inced)
7176 return GET_MODE_SIZE (GET_MODE (x));
7177 else if ((GET_CODE (addr) == PRE_MODIFY
7178 || GET_CODE (addr) == POST_MODIFY)
7179 && GET_CODE (XEXP (addr, 1)) == PLUS
7180 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7181 && XEXP (addr, 0) == inced
7182 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7183 {
7184 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7185 return i < 0 ? -i : i;
7186 }
7187 }
7188
7189 fmt = GET_RTX_FORMAT (code);
7190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7191 {
7192 if (fmt[i] == 'e')
7193 {
7194 int tem = find_inc_amount (XEXP (x, i), inced);
7195 if (tem != 0)
7196 return tem;
7197 }
7198 if (fmt[i] == 'E')
7199 {
7200 int j;
7201 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7202 {
7203 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7204 if (tem != 0)
7205 return tem;
7206 }
7207 }
7208 }
7209
7210 return 0;
7211 }
7212 \f
7213 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7214 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7215
7216 #ifdef AUTO_INC_DEC
7217 static int
7218 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7219 rtx insn)
7220 {
7221 rtx link;
7222
7223 gcc_assert (insn);
7224
7225 if (! INSN_P (insn))
7226 return 0;
7227
7228 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7229 if (REG_NOTE_KIND (link) == REG_INC)
7230 {
7231 unsigned int test = (int) REGNO (XEXP (link, 0));
7232 if (test >= regno && test < endregno)
7233 return 1;
7234 }
7235 return 0;
7236 }
7237 #else
7238
7239 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7240
7241 #endif
7242
7243 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7244 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7245 REG_INC. REGNO must refer to a hard register. */
7246
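/* For example, for an insn whose pattern is
   (parallel [(set ...) (clobber (reg:SI 1))]),
   regno_clobbered_p (1, insn, SImode, 0) returns 1 because hard
   register 1 is clobbered (the register number is illustrative).  */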
7247 int
7248 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7249 int sets)
7250 {
7251 unsigned int nregs, endregno;
7252
7253 /* regno must be a hard register. */
7254 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7255
7256 nregs = hard_regno_nregs[regno][mode];
7257 endregno = regno + nregs;
7258
7259 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7260 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7261 && REG_P (XEXP (PATTERN (insn), 0)))
7262 {
7263 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7264
7265 return test >= regno && test < endregno;
7266 }
7267
7268 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7269 return 1;
7270
7271 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7272 {
7273 int i = XVECLEN (PATTERN (insn), 0) - 1;
7274
7275 for (; i >= 0; i--)
7276 {
7277 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7278 if ((GET_CODE (elt) == CLOBBER
7279 || (sets == 1 && GET_CODE (elt) == SET))
7280 && REG_P (XEXP (elt, 0)))
7281 {
7282 unsigned int test = REGNO (XEXP (elt, 0));
7283
7284 if (test >= regno && test < endregno)
7285 return 1;
7286 }
7287 if (sets == 2
7288 && reg_inc_found_and_valid_p (regno, endregno, elt))
7289 return 1;
7290 }
7291 }
7292
7293 return 0;
7294 }
7295
7296 /* Find the low part, with mode MODE, of hard register RELOADREG. */
7297 rtx
7298 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7299 {
7300 int regno;
7301
7302 if (GET_MODE (reloadreg) == mode)
7303 return reloadreg;
7304
7305 regno = REGNO (reloadreg);
7306
7307 if (WORDS_BIG_ENDIAN)
7308 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7309 - (int) hard_regno_nregs[regno][mode];
7310
7311 return gen_rtx_REG (mode, regno);
7312 }
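/* For example, on a WORDS_BIG_ENDIAN target where (reg:DI 10) occupies
   hard registers 10 and 11, reload_adjust_reg_for_mode applied to that
   register and SImode yields (reg:SI 11), the register holding the low
   part (the register numbers are illustrative).  */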
7313
7314 static const char *const reload_when_needed_name[] =
7315 {
7316 "RELOAD_FOR_INPUT",
7317 "RELOAD_FOR_OUTPUT",
7318 "RELOAD_FOR_INSN",
7319 "RELOAD_FOR_INPUT_ADDRESS",
7320 "RELOAD_FOR_INPADDR_ADDRESS",
7321 "RELOAD_FOR_OUTPUT_ADDRESS",
7322 "RELOAD_FOR_OUTADDR_ADDRESS",
7323 "RELOAD_FOR_OPERAND_ADDRESS",
7324 "RELOAD_FOR_OPADDR_ADDR",
7325 "RELOAD_OTHER",
7326 "RELOAD_FOR_OTHER_ADDRESS"
7327 };
7328
7329 /* These functions are used to print the variables set by `find_reloads'.  */
7330
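/* These are typically called from a debugger session, e.g.

     (gdb) call debug_reload ()

   to dump the current rld[] array to stderr after find_reloads has
   been run (an illustrative use).  */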
7331 void
7332 debug_reload_to_stream (FILE *f)
7333 {
7334 int r;
7335 const char *prefix;
7336
7337 if (! f)
7338 f = stderr;
7339 for (r = 0; r < n_reloads; r++)
7340 {
7341 fprintf (f, "Reload %d: ", r);
7342
7343 if (rld[r].in != 0)
7344 {
7345 fprintf (f, "reload_in (%s) = ",
7346 GET_MODE_NAME (rld[r].inmode));
7347 print_inline_rtx (f, rld[r].in, 24);
7348 fprintf (f, "\n\t");
7349 }
7350
7351 if (rld[r].out != 0)
7352 {
7353 fprintf (f, "reload_out (%s) = ",
7354 GET_MODE_NAME (rld[r].outmode));
7355 print_inline_rtx (f, rld[r].out, 24);
7356 fprintf (f, "\n\t");
7357 }
7358
7359 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7360
7361 fprintf (f, "%s (opnum = %d)",
7362 reload_when_needed_name[(int) rld[r].when_needed],
7363 rld[r].opnum);
7364
7365 if (rld[r].optional)
7366 fprintf (f, ", optional");
7367
7368 if (rld[r].nongroup)
7369 fprintf (f, ", nongroup");
7370
7371 if (rld[r].inc != 0)
7372 fprintf (f, ", inc by %d", rld[r].inc);
7373
7374 if (rld[r].nocombine)
7375 fprintf (f, ", can't combine");
7376
7377 if (rld[r].secondary_p)
7378 fprintf (f, ", secondary_reload_p");
7379
7380 if (rld[r].in_reg != 0)
7381 {
7382 fprintf (f, "\n\treload_in_reg: ");
7383 print_inline_rtx (f, rld[r].in_reg, 24);
7384 }
7385
7386 if (rld[r].out_reg != 0)
7387 {
7388 fprintf (f, "\n\treload_out_reg: ");
7389 print_inline_rtx (f, rld[r].out_reg, 24);
7390 }
7391
7392 if (rld[r].reg_rtx != 0)
7393 {
7394 fprintf (f, "\n\treload_reg_rtx: ");
7395 print_inline_rtx (f, rld[r].reg_rtx, 24);
7396 }
7397
7398 prefix = "\n\t";
7399 if (rld[r].secondary_in_reload != -1)
7400 {
7401 fprintf (f, "%ssecondary_in_reload = %d",
7402 prefix, rld[r].secondary_in_reload);
7403 prefix = ", ";
7404 }
7405
7406 if (rld[r].secondary_out_reload != -1)
7407 fprintf (f, "%ssecondary_out_reload = %d\n",
7408 prefix, rld[r].secondary_out_reload);
7409
7410 prefix = "\n\t";
7411 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7412 {
7413 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7414 insn_data[rld[r].secondary_in_icode].name);
7415 prefix = ", ";
7416 }
7417
7418 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7419 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7420 insn_data[rld[r].secondary_out_icode].name);
7421
7422 fprintf (f, "\n");
7423 }
7424 }
7425
7426 void
7427 debug_reload (void)
7428 {
7429 debug_reload_to_stream (stderr);
7430 }