1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This file contains subroutines used only from the file reload1.c.
23 It knows how to scan one insn for operands and values
24 that need to be copied into registers to make valid code.
25 It also finds other operands and values which are valid
26 but for which equivalent values in registers exist and
27 ought to be used instead.
28
29 Before processing the first insn of the function, call `init_reload'.
30 init_reload actually has to be called earlier anyway.
31
32 To scan an insn, call `find_reloads'. This does two things:
33 1. sets up tables describing which values must be reloaded
34 for this insn, and what kind of hard regs they must be reloaded into;
35 2. optionally records the locations where those values appear in
36 the data, so they can be replaced properly later.
37 This is done only if the second arg to `find_reloads' is nonzero.
38
39 The third arg to `find_reloads' specifies the number of levels
40 of indirect addressing supported by the machine. If it is zero,
41 indirect addressing is not valid. If it is one, (MEM (REG n))
42 is valid even if (REG n) did not get a hard register; if it is two,
43 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
44 hard register, and similarly for higher values.
45
46 Then you must choose the hard regs to reload those pseudo regs into,
47 and generate appropriate load insns before this insn and perhaps
48 also store insns after this insn. Set up the array `reload_reg_rtx'
49 to contain the REG rtx's for the registers you used. For certain
50 reloads, `find_reloads' will return a nonzero value in `reload_reg_rtx';
51 that tells you which register to use,
52 so you do not need to allocate one. But you still do need to add extra
53 instructions to copy the value into and out of that register.
54
55 Finally you must call `subst_reloads' to substitute the reload reg rtx's
56 into the locations already recorded.
57
58 NOTE SIDE EFFECTS:
59
60 find_reloads can alter the operands of the instruction it is called on.
61
62 1. Two operands of any sort may be interchanged, if they are in a
63 commutative instruction.
64 This happens only if find_reloads thinks the instruction will compile
65 better that way.
66
67 2. Pseudo-registers that are equivalent to constants are replaced
68 with those constants if they are not in hard registers.
69
70 1 happens every time find_reloads is called.
71 2 happens only when REPLACE is 1, which is only when
72 actually doing the reloads, not when just counting them.
73
74 Using a reload register for several reloads in one insn:
75
76 When an insn has reloads, it is considered as having three parts:
77 the input reloads, the insn itself after reloading, and the output reloads.
78 Reloads of values used in memory addresses are often needed for only one part.
79
80 When this is so, reload_when_needed records which part needs the reload.
81 Two reloads for different parts of the insn can share the same reload
82 register.
83
84 When a reload is used for addresses in multiple parts, or when it is
85 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
86 a register with any other reload. */
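
/* A rough sketch of the expected call sequence, condensed from the
   description above (argument lists are abbreviated and the loop is
   pseudo-code, not a quotation of reload1.c):

     init_reload ();
     for (each insn that may need reloads)
       {
         find_reloads (insn, replace, ind_levels, ...);
         ... choose a hard reg for each reload and record it in
             rld[r].reg_rtx, unless find_reloads already supplied one ...
         ... emit load insns before the insn and store insns after it ...
         subst_reloads (insn);
       }  */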
87
88 #define REG_OK_STRICT
89
90 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
91 #undef DEBUG_RELOAD
92
93 #include "config.h"
94 #include "system.h"
95 #include "coretypes.h"
96 #include "tm.h"
97 #include "rtl-error.h"
98 #include "tm_p.h"
99 #include "insn-config.h"
100 #include "expr.h"
101 #include "optabs.h"
102 #include "recog.h"
103 #include "df.h"
104 #include "reload.h"
105 #include "regs.h"
106 #include "addresses.h"
107 #include "hard-reg-set.h"
108 #include "flags.h"
109 #include "output.h"
110 #include "function.h"
111 #include "params.h"
112 #include "target.h"
113 #include "ira.h"
114
115 /* True if X is a constant that can be forced into the constant pool.
116 MODE is the mode of the operand, or VOIDmode if not known. */
117 #define CONST_POOL_OK_P(MODE, X) \
118 ((MODE) != VOIDmode \
119 && CONSTANT_P (X) \
120 && GET_CODE (X) != HIGH \
121 && !targetm.cannot_force_const_mem (MODE, X))
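
/* Illustrative reading of the macro above: an ordinary CONST_INT or
   CONST_DOUBLE operand whose mode is known normally satisfies
   CONST_POOL_OK_P, while a HIGH rtx, or any constant the target rejects
   via targetm.cannot_force_const_mem, does not.  */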
122
123 /* True if RCLASS is a non-empty register class that has too few registers
124 to be safely used as a reload target class. */
125
126 static inline bool
127 small_register_class_p (reg_class_t rclass)
128 {
129 return (reg_class_size [(int) rclass] == 1
130 || (reg_class_size [(int) rclass] >= 1
131 && targetm.class_likely_spilled_p (rclass)));
132 }
133
134 \f
135 /* All reloads of the current insn are recorded here. See reload.h for
136 comments. */
137 int n_reloads;
138 struct reload rld[MAX_RELOADS];
139
140 /* All the "earlyclobber" operands of the current insn
141 are recorded here. */
142 int n_earlyclobbers;
143 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
144
145 int reload_n_operands;
146
147 /* Replacing reloads.
148
149 If `replace_reloads' is nonzero, then as each reload is recorded
150 an entry is made for it in the table `replacements'.
151 Then later `subst_reloads' can look through that table and
152 perform all the replacements needed. */
153
154 /* Nonzero means record the places to replace. */
155 static int replace_reloads;
156
157 /* Each replacement is recorded with a structure like this. */
158 struct replacement
159 {
160 rtx *where; /* Location to store in */
161 rtx *subreg_loc; /* Location of SUBREG if WHERE is inside
162 a SUBREG; 0 otherwise. */
163 int what; /* which reload this is for */
164 enum machine_mode mode; /* mode it must have */
165 };
166
167 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
168
169 /* Number of replacements currently recorded. */
170 static int n_replacements;
171
172 /* Used to track what is modified by an operand. */
173 struct decomposition
174 {
175 int reg_flag; /* Nonzero if referencing a register. */
176 int safe; /* Nonzero if this can't conflict with anything. */
177 rtx base; /* Base address for MEM. */
178 HOST_WIDE_INT start; /* Starting offset or register number. */
179 HOST_WIDE_INT end; /* Ending offset or register number. */
180 };
181
182 #ifdef SECONDARY_MEMORY_NEEDED
183
184 /* Save MEMs needed to copy from one class of registers to another. One MEM
185 is used per mode, but normally only one or two modes are ever used.
186
187 We keep two versions, before and after register elimination. The one
188 after register elimination is recorded separately for each operand. This
189 is done in case the address is not valid, to be sure that we reload
190 each one separately. */
191
192 static rtx secondary_memlocs[NUM_MACHINE_MODES];
193 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
194 static int secondary_memlocs_elim_used = 0;
195 #endif
196
197 /* The instruction we are doing reloads for;
198 so we can test whether a register dies in it. */
199 static rtx this_insn;
200
201 /* Nonzero if this instruction is a user-specified asm with operands. */
202 static int this_insn_is_asm;
203
204 /* If hard_regs_live_known is nonzero,
205 we can tell which hard regs are currently live,
206 at least enough to succeed in choosing dummy reloads. */
207 static int hard_regs_live_known;
208
209 /* Indexed by hard reg number,
210 element is nonnegative if hard reg has been spilled.
211 This vector is passed to `find_reloads' as an argument
212 and is not changed here. */
213 static short *static_reload_reg_p;
214
215 /* Set to 1 in subst_reg_equivs if it changes anything. */
216 static int subst_reg_equivs_changed;
217
218 /* On return from push_reload, holds the reload-number for the OUT
219 operand, which can differ from the reload-number for the input operand. */
220 static int output_reloadnum;
221
222 /* Compare two RTX's. */
223 #define MATCHES(x, y) \
224 (x == y || (x != 0 && (REG_P (x) \
225 ? REG_P (y) && REGNO (x) == REGNO (y) \
226 : rtx_equal_p (x, y) && ! side_effects_p (x))))
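
/* For instance, two distinct (reg:SI 5) rtx objects MATCH because only
   the register number is compared, whereas two distinct MEMs match only
   if they are rtx_equal_p and free of side effects, so a volatile MEM
   matches only when X and Y are the very same rtx.  */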
227
228 /* Nonzero if two reload purposes are for similar enough things that we
229 can merge their reloads. */
230 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
231 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
232 || ((when1) == (when2) && (op1) == (op2)) \
233 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
234 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
236 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
237 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
238
239 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
240 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
241 ((when1) != (when2) \
242 || ! ((op1) == (op2) \
243 || (when1) == RELOAD_FOR_INPUT \
244 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
245 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
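
/* Worked examples of the two macros above (illustrative): two
   RELOAD_FOR_INPUT reloads for different operands are MERGABLE_RELOADS,
   and MERGE_TO_OTHER is false, so the merged reload keeps
   RELOAD_FOR_INPUT.  Two RELOAD_FOR_INPUT_ADDRESS reloads for different
   operands are not mergeable at all.  Merging a RELOAD_FOR_INPUT reload
   with a RELOAD_OTHER reload is allowed, and MERGE_TO_OTHER is true, so
   the result becomes RELOAD_OTHER.  */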
246
247 /* If we are going to reload an address, compute the reload type to
248 use. */
249 #define ADDR_TYPE(type) \
250 ((type) == RELOAD_FOR_INPUT_ADDRESS \
251 ? RELOAD_FOR_INPADDR_ADDRESS \
252 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
253 ? RELOAD_FOR_OUTADDR_ADDRESS \
254 : (type)))
255
256 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
257 enum machine_mode, enum reload_type,
258 enum insn_code *, secondary_reload_info *);
259 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
260 int, unsigned int);
261 static int reload_inner_reg_of_subreg (rtx, enum machine_mode, int);
262 static void push_replacement (rtx *, int, enum machine_mode);
263 static void dup_replacements (rtx *, rtx *);
264 static void combine_reloads (void);
265 static int find_reusable_reload (rtx *, rtx, enum reg_class,
266 enum reload_type, int, int);
267 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
268 enum machine_mode, reg_class_t, int, int);
269 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
270 static struct decomposition decompose (rtx);
271 static int immune_p (rtx, rtx, struct decomposition);
272 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
273 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, rtx,
274 int *);
275 static rtx make_memloc (rtx, int);
276 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
277 addr_space_t, rtx *);
278 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
279 int, enum reload_type, int, rtx);
280 static rtx subst_reg_equivs (rtx, rtx);
281 static rtx subst_indexed_address (rtx);
282 static void update_auto_inc_notes (rtx, int, int);
283 static int find_reloads_address_1 (enum machine_mode, rtx, int,
284 enum rtx_code, enum rtx_code, rtx *,
285 int, enum reload_type,int, rtx);
286 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
287 enum machine_mode, int,
288 enum reload_type, int);
289 static rtx find_reloads_subreg_address (rtx, int, int, enum reload_type,
290 int, rtx, int *);
291 static void copy_replacements_1 (rtx *, rtx *, int);
292 static int find_inc_amount (rtx, rtx);
293 static int refers_to_mem_for_reload_p (rtx);
294 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
295 rtx, rtx *);
296
297 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
298 list yet. */
299
300 static void
301 push_reg_equiv_alt_mem (int regno, rtx mem)
302 {
303 rtx it;
304
305 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
306 if (rtx_equal_p (XEXP (it, 0), mem))
307 return;
308
309 reg_equiv_alt_mem_list (regno)
310 = alloc_EXPR_LIST (REG_EQUIV, mem,
311 reg_equiv_alt_mem_list (regno));
312 }
313 \f
314 /* Determine if any secondary reloads are needed for loading (if IN_P is
315 nonzero) or storing (if IN_P is zero) X to or from a reload register of
316 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
317 are needed, push them.
318
319 Return the reload number of the secondary reload we made, or -1 if
320 we didn't need one. *PICODE is set to the insn_code to use if we do
321 need a secondary reload. */
322
323 static int
324 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
325 enum reg_class reload_class,
326 enum machine_mode reload_mode, enum reload_type type,
327 enum insn_code *picode, secondary_reload_info *prev_sri)
328 {
329 enum reg_class rclass = NO_REGS;
330 enum reg_class scratch_class;
331 enum machine_mode mode = reload_mode;
332 enum insn_code icode = CODE_FOR_nothing;
333 enum insn_code t_icode = CODE_FOR_nothing;
334 enum reload_type secondary_type;
335 int s_reload, t_reload = -1;
336 const char *scratch_constraint;
337 char letter;
338 secondary_reload_info sri;
339
340 if (type == RELOAD_FOR_INPUT_ADDRESS
341 || type == RELOAD_FOR_OUTPUT_ADDRESS
342 || type == RELOAD_FOR_INPADDR_ADDRESS
343 || type == RELOAD_FOR_OUTADDR_ADDRESS)
344 secondary_type = type;
345 else
346 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
347
348 *picode = CODE_FOR_nothing;
349
350 /* If X is a paradoxical SUBREG, use the inner value to determine both the
351 mode and object being reloaded. */
352 if (GET_CODE (x) == SUBREG
353 && (GET_MODE_SIZE (GET_MODE (x))
354 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
355 {
356 x = SUBREG_REG (x);
357 reload_mode = GET_MODE (x);
358 }
359
360 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
361 is still a pseudo-register by now, it *must* have an equivalent MEM
362 but we don't want to assume that), use that equivalent when seeing if
363 a secondary reload is needed since whether or not a reload is needed
364 might be sensitive to the form of the MEM. */
365
366 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
367 && reg_equiv_mem (REGNO (x)))
368 x = reg_equiv_mem (REGNO (x));
369
370 sri.icode = CODE_FOR_nothing;
371 sri.prev_sri = prev_sri;
372 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
373 reload_mode, &sri);
374 icode = (enum insn_code) sri.icode;
375
376 /* If we don't need any secondary registers, done. */
377 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
378 return -1;
379
380 if (rclass != NO_REGS)
381 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
382 reload_mode, type, &t_icode, &sri);
383
384 /* If we will be using an insn, the secondary reload is for a
385 scratch register. */
386
387 if (icode != CODE_FOR_nothing)
388 {
389 /* If IN_P is nonzero, the reload register will be the output in
390 operand 0. If IN_P is zero, the reload register will be the input
391 in operand 1. Outputs should have an initial "=", which we must
392 skip. */
393
394 /* ??? It would be useful to be able to handle only two, or more than
395 three, operands, but for now we can only handle the case of having
396 exactly three: output, input and one temp/scratch. */
397 gcc_assert (insn_data[(int) icode].n_operands == 3);
398
399 /* ??? We currently have no way to represent a reload that needs
400 an icode to reload from an intermediate tertiary reload register.
401 We should probably have a new field in struct reload to tag a
402 chain of scratch operand reloads onto. */
403 gcc_assert (rclass == NO_REGS);
404
405 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
406 gcc_assert (*scratch_constraint == '=');
407 scratch_constraint++;
408 if (*scratch_constraint == '&')
409 scratch_constraint++;
410 letter = *scratch_constraint;
411 scratch_class = (letter == 'r' ? GENERAL_REGS
412 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) letter,
413 scratch_constraint));
414
415 rclass = scratch_class;
416 mode = insn_data[(int) icode].operand[2].mode;
417 }
418
419 /* This case isn't valid, so fail. Reload is allowed to use the same
420 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
421 in the case of a secondary register, we actually need two different
422 registers for correct code. We fail here to prevent the possibility of
423 silently generating incorrect code later.
424
425 The convention is that secondary input reloads are valid only if the
426 secondary_class is different from class. If you have such a case, you
427 cannot use secondary reloads; you must work around the problem some
428 other way.
429
430 Allow this when a reload_in/out pattern is being used. I.e. assume
431 that the generated code handles this case. */
432
433 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
434 || t_icode != CODE_FOR_nothing);
435
436 /* See if we can reuse an existing secondary reload. */
437 for (s_reload = 0; s_reload < n_reloads; s_reload++)
438 if (rld[s_reload].secondary_p
439 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
440 || reg_class_subset_p (rld[s_reload].rclass, rclass))
441 && ((in_p && rld[s_reload].inmode == mode)
442 || (! in_p && rld[s_reload].outmode == mode))
443 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
444 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
445 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
446 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
447 && (small_register_class_p (rclass)
448 || targetm.small_register_classes_for_mode_p (VOIDmode))
449 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
450 opnum, rld[s_reload].opnum))
451 {
452 if (in_p)
453 rld[s_reload].inmode = mode;
454 if (! in_p)
455 rld[s_reload].outmode = mode;
456
457 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
458 rld[s_reload].rclass = rclass;
459
460 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
461 rld[s_reload].optional &= optional;
462 rld[s_reload].secondary_p = 1;
463 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
464 opnum, rld[s_reload].opnum))
465 rld[s_reload].when_needed = RELOAD_OTHER;
466
467 break;
468 }
469
470 if (s_reload == n_reloads)
471 {
472 #ifdef SECONDARY_MEMORY_NEEDED
473 /* If we need a memory location to copy between the two reload regs,
474 set it up now. Note that we do the input case before making
475 the reload and the output case after. This is due to the
476 way reloads are output. */
477
478 if (in_p && icode == CODE_FOR_nothing
479 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
480 {
481 get_secondary_mem (x, reload_mode, opnum, type);
482
483 /* We may have just added new reloads. Make sure we add
484 the new reload at the end. */
485 s_reload = n_reloads;
486 }
487 #endif
488
489 /* We need to make a new secondary reload for this register class. */
490 rld[s_reload].in = rld[s_reload].out = 0;
491 rld[s_reload].rclass = rclass;
492
493 rld[s_reload].inmode = in_p ? mode : VOIDmode;
494 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
495 rld[s_reload].reg_rtx = 0;
496 rld[s_reload].optional = optional;
497 rld[s_reload].inc = 0;
498 /* Maybe we could combine these, but it seems too tricky. */
499 rld[s_reload].nocombine = 1;
500 rld[s_reload].in_reg = 0;
501 rld[s_reload].out_reg = 0;
502 rld[s_reload].opnum = opnum;
503 rld[s_reload].when_needed = secondary_type;
504 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
505 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
506 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
507 rld[s_reload].secondary_out_icode
508 = ! in_p ? t_icode : CODE_FOR_nothing;
509 rld[s_reload].secondary_p = 1;
510
511 n_reloads++;
512
513 #ifdef SECONDARY_MEMORY_NEEDED
514 if (! in_p && icode == CODE_FOR_nothing
515 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
516 get_secondary_mem (x, mode, opnum, type);
517 #endif
518 }
519
520 *picode = icode;
521 return s_reload;
522 }
523
524 /* If a secondary reload is needed, return its class. If both an intermediate
525 register and a scratch register are needed, we return the class of the
526 intermediate register. */
527 reg_class_t
528 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
529 rtx x)
530 {
531 enum insn_code icode;
532 secondary_reload_info sri;
533
534 sri.icode = CODE_FOR_nothing;
535 sri.prev_sri = NULL;
536 rclass
537 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
538 icode = (enum insn_code) sri.icode;
539
540 /* If there are no secondary reloads at all, we return NO_REGS.
541 If an intermediate register is needed, we return its class. */
542 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
543 return rclass;
544
545 /* No intermediate register is needed, but we have a special reload
546 pattern, which we assume for now needs a scratch register. */
547 return scratch_reload_class (icode);
548 }
549
550 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
551 three operands, verify that operand 2 is an output operand, and return
552 its register class.
553 ??? We'd like to be able to handle any pattern with at least 2 operands,
554 for zero or more scratch registers, but that needs more infrastructure. */
555 enum reg_class
556 scratch_reload_class (enum insn_code icode)
557 {
558 const char *scratch_constraint;
559 char scratch_letter;
560 enum reg_class rclass;
561
562 gcc_assert (insn_data[(int) icode].n_operands == 3);
563 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
564 gcc_assert (*scratch_constraint == '=');
565 scratch_constraint++;
566 if (*scratch_constraint == '&')
567 scratch_constraint++;
568 scratch_letter = *scratch_constraint;
569 if (scratch_letter == 'r')
570 return GENERAL_REGS;
571 rclass = REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
572 scratch_constraint);
573 gcc_assert (rclass != NO_REGS);
574 return rclass;
575 }
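
/* For example (illustrative): for a reload pattern whose operand 2
   constraint string is "=&r", the code above skips the '=' and '&' and
   returns GENERAL_REGS.  */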
576 \f
577 #ifdef SECONDARY_MEMORY_NEEDED
578
579 /* Return a memory location that will be used to copy X in mode MODE.
580 If we haven't already made a location for this mode in this insn,
581 call find_reloads_address on the location being returned. */
582
583 rtx
584 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
585 int opnum, enum reload_type type)
586 {
587 rtx loc;
588 int mem_valid;
589
590 /* By default, if MODE is narrower than a word, widen it to a word.
591 This is required because most machines that require these memory
592 locations do not support short loads and stores from all registers
593 (e.g., FP registers). */
594
595 #ifdef SECONDARY_MEMORY_NEEDED_MODE
596 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
597 #else
598 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
599 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
600 #endif
601
602 /* If we already have made a MEM for this operand in MODE, return it. */
603 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
604 return secondary_memlocs_elim[(int) mode][opnum];
605
606 /* If this is the first time we've tried to get a MEM for this mode,
607 allocate a new one. `something_changed' in reload will get set
608 by noticing that the frame size has changed. */
609
610 if (secondary_memlocs[(int) mode] == 0)
611 {
612 #ifdef SECONDARY_MEMORY_NEEDED_RTX
613 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
614 #else
615 secondary_memlocs[(int) mode]
616 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
617 #endif
618 }
619
620 /* Get a version of the address doing any eliminations needed. If that
621 didn't give us a new MEM, make a new one if it isn't valid. */
622
623 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
624 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
625 MEM_ADDR_SPACE (loc));
626
627 if (! mem_valid && loc == secondary_memlocs[(int) mode])
628 loc = copy_rtx (loc);
629
630 /* The only time the call below will do anything is if the stack
631 offset is too large. In that case IND_LEVELS doesn't matter, so we
632 can just pass a zero. Adjust the type to be the address of the
633 corresponding object. If the address was valid, save the eliminated
634 address. If it wasn't valid, we need to make a reload each time, so
635 don't save it. */
636
637 if (! mem_valid)
638 {
639 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
640 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
641 : RELOAD_OTHER);
642
643 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
644 opnum, type, 0, 0);
645 }
646
647 secondary_memlocs_elim[(int) mode][opnum] = loc;
648 if (secondary_memlocs_elim_used <= (int)mode)
649 secondary_memlocs_elim_used = (int)mode + 1;
650 return loc;
651 }
652
653 /* Clear any secondary memory locations we've made. */
654
655 void
656 clear_secondary_mem (void)
657 {
658 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
659 }
660 #endif /* SECONDARY_MEMORY_NEEDED */
661 \f
662
663 /* Find the largest class which has at least one register valid in
664 mode INNER, and which for every such register, that register number
665 plus N is also valid in OUTER (if in range) and is cheap to move
666 into DEST_REGNO. Such a class must exist. */
667
668 static enum reg_class
669 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
670 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
671 unsigned int dest_regno ATTRIBUTE_UNUSED)
672 {
673 int best_cost = -1;
674 int rclass;
675 int regno;
676 enum reg_class best_class = NO_REGS;
677 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
678 unsigned int best_size = 0;
679 int cost;
680
681 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
682 {
683 int bad = 0;
684 int good = 0;
685 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
686 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
687 {
688 if (HARD_REGNO_MODE_OK (regno, inner))
689 {
690 good = 1;
691 if (! TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
692 || ! HARD_REGNO_MODE_OK (regno + n, outer))
693 bad = 1;
694 }
695 }
696
697 if (bad || !good)
698 continue;
699 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
700
701 if ((reg_class_size[rclass] > best_size
702 && (best_cost < 0 || best_cost >= cost))
703 || best_cost > cost)
704 {
705 best_class = (enum reg_class) rclass;
706 best_size = reg_class_size[rclass];
707 best_cost = register_move_cost (outer, (enum reg_class) rclass,
708 dest_class);
709 }
710 }
711
712 gcc_assert (best_size != 0);
713
714 return best_class;
715 }
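
/* A worked example (illustrative): when push_reload reloads the inner
   register of something like (subreg:SI (reg:DI r) 4), it calls this with
   OUTER = SImode, INNER = DImode and N = the hard-register offset of that
   word, asking for the largest class every member of which is valid in
   DImode, whose member plus N is valid in SImode, and which is cheap to
   move into DEST_REGNO's class.  */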
716 \f
717 /* Return the number of a previously made reload that can be combined with
718 a new one, or n_reloads if none of the existing reloads can be used.
719 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
720 push_reload; they determine the kind of the new reload that we try to
721 combine. P_IN points to the corresponding value of IN, which can be
722 modified by this function.
723 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
724
725 static int
726 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
727 enum reload_type type, int opnum, int dont_share)
728 {
729 rtx in = *p_in;
730 int i;
731 /* We can't merge two reloads if the output of either one is
732 earlyclobbered. */
733
734 if (earlyclobber_operand_p (out))
735 return n_reloads;
736
737 /* We can use an existing reload if the class is right
738 and at least one of IN and OUT is a match
739 and the other is at worst neutral.
740 (A zero compared against anything is neutral.)
741
742 For targets with small register classes, don't use existing reloads
743 unless they are for the same thing since that can cause us to need
744 more reload registers than we otherwise would. */
745
746 for (i = 0; i < n_reloads; i++)
747 if ((reg_class_subset_p (rclass, rld[i].rclass)
748 || reg_class_subset_p (rld[i].rclass, rclass))
749 /* If the existing reload has a register, it must fit our class. */
750 && (rld[i].reg_rtx == 0
751 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
752 true_regnum (rld[i].reg_rtx)))
753 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
754 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
755 || (out != 0 && MATCHES (rld[i].out, out)
756 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
757 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
758 && (small_register_class_p (rclass)
759 || targetm.small_register_classes_for_mode_p (VOIDmode))
760 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
761 return i;
762
763 /* Reloading a plain reg for input can match a reload to postincrement
764 that reg, since the postincrement's value is the right value.
765 Likewise, it can match a preincrement reload, since we regard
766 the preincrementation as happening before any ref in this insn
767 to that register. */
768 for (i = 0; i < n_reloads; i++)
769 if ((reg_class_subset_p (rclass, rld[i].rclass)
770 || reg_class_subset_p (rld[i].rclass, rclass))
771 /* If the existing reload has a register, it must fit our
772 class. */
773 && (rld[i].reg_rtx == 0
774 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
775 true_regnum (rld[i].reg_rtx)))
776 && out == 0 && rld[i].out == 0 && rld[i].in != 0
777 && ((REG_P (in)
778 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
779 && MATCHES (XEXP (rld[i].in, 0), in))
780 || (REG_P (rld[i].in)
781 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
782 && MATCHES (XEXP (in, 0), rld[i].in)))
783 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
784 && (small_register_class_p (rclass)
785 || targetm.small_register_classes_for_mode_p (VOIDmode))
786 && MERGABLE_RELOADS (type, rld[i].when_needed,
787 opnum, rld[i].opnum))
788 {
789 /* Make sure reload_in ultimately has the increment,
790 not the plain register. */
791 if (REG_P (in))
792 *p_in = rld[i].in;
793 return i;
794 }
795 return n_reloads;
796 }
797
798 /* Return nonzero if X is a SUBREG which will require reloading of its
799 SUBREG_REG expression. */
800
801 static int
802 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, int output)
803 {
804 rtx inner;
805
806 /* Only SUBREGs are problematical. */
807 if (GET_CODE (x) != SUBREG)
808 return 0;
809
810 inner = SUBREG_REG (x);
811
812 /* If INNER is a constant or PLUS, then INNER must be reloaded. */
813 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
814 return 1;
815
816 /* If INNER is not a hard register, then INNER will not need to
817 be reloaded. */
818 if (!REG_P (inner)
819 || REGNO (inner) >= FIRST_PSEUDO_REGISTER)
820 return 0;
821
822 /* If INNER is not ok for MODE, then INNER will need reloading. */
823 if (! HARD_REGNO_MODE_OK (subreg_regno (x), mode))
824 return 1;
825
826 /* If the outer part is a word or smaller, INNER larger than a
827 word and the number of regs for INNER is not the same as the
828 number of words in INNER, then INNER will need reloading. */
829 return (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
830 && output
831 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
832 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
833 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
834 }
835
836 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
837 requiring an extra reload register. The caller has already found that
838 IN contains some reference to REGNO, so check that we can produce the
839 new value in a single step. E.g. if we have
840 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
841 instruction that adds one to a register, this should succeed.
842 However, if we have something like
843 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
844 needs to be loaded into a register first, we need a separate reload
845 register.
846 Such PLUS reloads are generated by find_reloads_address_part.
847 The out-of-range PLUS expressions are usually introduced in the instruction
848 patterns by register elimination and substituting pseudos without a home
849 by their function-invariant equivalences. */
850 static int
851 can_reload_into (rtx in, int regno, enum machine_mode mode)
852 {
853 rtx dst, test_insn;
854 int r = 0;
855 struct recog_data save_recog_data;
856
857 /* For matching constraints, we often get notional input reloads where
858 we want to use the original register as the reload register. I.e.
859 technically this is a non-optional input-output reload, but IN is
860 already a valid register, and has been chosen as the reload register.
861 Speed this up, since it trivially works. */
862 if (REG_P (in))
863 return 1;
864
865 /* To test MEMs properly, we'd have to take into account all the reloads
866 that are already scheduled, which can become quite complicated.
867 And since we've already handled address reloads for this MEM, it
868 should always succeed anyway. */
869 if (MEM_P (in))
870 return 1;
871
872 /* If we can make a simple SET insn that does the job, everything should
873 be fine. */
874 dst = gen_rtx_REG (mode, regno);
875 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
876 save_recog_data = recog_data;
877 if (recog_memoized (test_insn) >= 0)
878 {
879 extract_insn (test_insn);
880 r = constrain_operands (1);
881 }
882 recog_data = save_recog_data;
883 return r;
884 }
885
886 /* Record one reload that needs to be performed.
887 IN is an rtx saying where the data are to be found before this instruction.
888 OUT says where they must be stored after the instruction.
889 (IN is zero for data not read, and OUT is zero for data not written.)
890 INLOC and OUTLOC point to the places in the instructions where
891 IN and OUT were found.
892 If IN and OUT are both nonzero, it means the same register must be used
893 to reload both IN and OUT.
894
895 RCLASS is a register class required for the reloaded data.
896 INMODE is the machine mode that the instruction requires
897 for the reg that replaces IN and OUTMODE is likewise for OUT.
898
899 If IN is zero, then OUT's location and mode should be passed as
900 INLOC and INMODE.
901
902 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
903
904 OPTIONAL nonzero means this reload does not need to be performed:
905 it can be discarded if that is more convenient.
906
907 OPNUM and TYPE say what the purpose of this reload is.
908
909 The return value is the reload-number for this reload.
910
911 If both IN and OUT are nonzero, in some rare cases we might
912 want to make two separate reloads. (Actually we never do this now.)
913 Therefore, the reload-number for OUT is stored in
914 output_reloadnum when we return; the return value applies to IN.
915 Usually (presently always), when IN and OUT are nonzero,
916 the two reload-numbers are equal, but the caller should be careful to
917 distinguish them. */
918
919 int
920 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
921 enum reg_class rclass, enum machine_mode inmode,
922 enum machine_mode outmode, int strict_low, int optional,
923 int opnum, enum reload_type type)
924 {
925 int i;
926 int dont_share = 0;
927 int dont_remove_subreg = 0;
928 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
929 int secondary_in_reload = -1, secondary_out_reload = -1;
930 enum insn_code secondary_in_icode = CODE_FOR_nothing;
931 enum insn_code secondary_out_icode = CODE_FOR_nothing;
932
933 /* INMODE and/or OUTMODE could be VOIDmode if no mode
934 has been specified for the operand. In that case,
935 use the operand's mode as the mode to reload. */
936 if (inmode == VOIDmode && in != 0)
937 inmode = GET_MODE (in);
938 if (outmode == VOIDmode && out != 0)
939 outmode = GET_MODE (out);
940
941 /* If find_reloads and friends have so far failed to replace a pseudo
942 with its reg_equiv_constant, something went wrong
943 beforehand.
944 Note that the replacement can't simply be done here if we missed it
945 earlier, since the constant might need to be pushed into the literal
946 pool and the resulting memref would probably need further
947 reloading. */
948 if (in != 0 && REG_P (in))
949 {
950 int regno = REGNO (in);
951
952 gcc_assert (regno < FIRST_PSEUDO_REGISTER
953 || reg_renumber[regno] >= 0
954 || reg_equiv_constant (regno) == NULL_RTX);
955 }
956
957 /* reg_equiv_constant only contains constants which are obviously
958 not appropriate as a destination. So if we would need to replace
959 the destination pseudo with a constant, we are in real
960 trouble. */
961 if (out != 0 && REG_P (out))
962 {
963 int regno = REGNO (out);
964
965 gcc_assert (regno < FIRST_PSEUDO_REGISTER
966 || reg_renumber[regno] >= 0
967 || reg_equiv_constant (regno) == NULL_RTX);
968 }
969
970 /* If we have a read-write operand with an address side-effect,
971 change either IN or OUT so the side-effect happens only once. */
972 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
973 switch (GET_CODE (XEXP (in, 0)))
974 {
975 case POST_INC: case POST_DEC: case POST_MODIFY:
976 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
977 break;
978
979 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
980 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
981 break;
982
983 default:
984 break;
985 }
986
987 /* If we are reloading a (SUBREG constant ...), really reload just the
988 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
989 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
990 a pseudo and hence will become a MEM) with M1 wider than M2 and the
991 register is a pseudo, also reload the inside expression.
992 For machines that extend byte loads, do this for any SUBREG of a pseudo
993 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
994 M2 is an integral mode that gets extended when loaded.
995 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
996 either M1 is not valid for R or M2 is wider than a word but we only
997 need one word to store an M2-sized quantity in R.
998 (However, if OUT is nonzero, we need to reload the reg *and*
999 the subreg, so do nothing here, and let the following statement handle it.)
1000
1001 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1002 we can't handle it here because CONST_INT does not indicate a mode.
1003
1004 Similarly, we must reload the inside expression if we have a
1005 STRICT_LOW_PART (presumably, in == out in this case).
1006
1007 Also reload the inner expression if it does not require a secondary
1008 reload but the SUBREG does.
1009
1010 Finally, reload the inner expression if it is a register that is in
1011 the class whose registers cannot be referenced in a different size
1012 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1013 cannot reload just the inside since we might end up with the wrong
1014 register class. But if it is inside a STRICT_LOW_PART, we have
1015 no choice, so we hope we do get the right register class there. */
1016
1017 if (in != 0 && GET_CODE (in) == SUBREG
1018 && (subreg_lowpart_p (in) || strict_low)
1019 #ifdef CANNOT_CHANGE_MODE_CLASS
1020 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1021 #endif
1022 && (CONSTANT_P (SUBREG_REG (in))
1023 || GET_CODE (SUBREG_REG (in)) == PLUS
1024 || strict_low
1025 || (((REG_P (SUBREG_REG (in))
1026 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1027 || MEM_P (SUBREG_REG (in)))
1028 && ((GET_MODE_SIZE (inmode)
1029 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1030 #ifdef LOAD_EXTEND_OP
1031 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1032 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1033 <= UNITS_PER_WORD)
1034 && (GET_MODE_SIZE (inmode)
1035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1036 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1037 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1038 #endif
1039 #ifdef WORD_REGISTER_OPERATIONS
1040 || ((GET_MODE_SIZE (inmode)
1041 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
1042 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1043 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1044 / UNITS_PER_WORD)))
1045 #endif
1046 ))
1047 || (REG_P (SUBREG_REG (in))
1048 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1049 /* The case where out is nonzero
1050 is handled differently in the following statement. */
1051 && (out == 0 || subreg_lowpart_p (in))
1052 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1053 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1054 > UNITS_PER_WORD)
1055 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1056 / UNITS_PER_WORD)
1057 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1058 [GET_MODE (SUBREG_REG (in))]))
1059 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1060 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1061 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1062 SUBREG_REG (in))
1063 == NO_REGS))
1064 #ifdef CANNOT_CHANGE_MODE_CLASS
1065 || (REG_P (SUBREG_REG (in))
1066 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1067 && REG_CANNOT_CHANGE_MODE_P
1068 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1069 #endif
1070 ))
1071 {
1072 in_subreg_loc = inloc;
1073 inloc = &SUBREG_REG (in);
1074 in = *inloc;
1075 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1076 if (MEM_P (in))
1077 /* This is supposed to happen only for paradoxical subregs made by
1078 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1079 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1080 #endif
1081 inmode = GET_MODE (in);
1082 }
1083
1084 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1085 either M1 is not valid for R or M2 is wider than a word but we only
1086 need one word to store an M2-sized quantity in R.
1087
1088 However, we must reload the inner reg *as well as* the subreg in
1089 that case. */
1090
1091 /* Similar issue for (SUBREG constant ...) if it was not handled by the
1092 code above. This can happen if SUBREG_BYTE != 0. */
1093
1094 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, 0))
1095 {
1096 enum reg_class in_class = rclass;
1097
1098 if (REG_P (SUBREG_REG (in)))
1099 in_class
1100 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1101 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1102 GET_MODE (SUBREG_REG (in)),
1103 SUBREG_BYTE (in),
1104 GET_MODE (in)),
1105 REGNO (SUBREG_REG (in)));
1106
1107 /* This relies on the fact that emit_reload_insns outputs the
1108 instructions for input reloads of type RELOAD_OTHER in the same
1109 order as the reloads. Thus if the outer reload is also of type
1110 RELOAD_OTHER, we are guaranteed that this inner reload will be
1111 output before the outer reload. */
1112 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1113 in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1114 dont_remove_subreg = 1;
1115 }
1116
1117 /* Similarly for paradoxical and problematical SUBREGs on the output.
1118 Note that there is no reason we need to worry about the previous value
1119 of SUBREG_REG (out); even if wider than out,
1120 storing in a subreg is entitled to clobber it all
1121 (except in the case of STRICT_LOW_PART,
1122 and in that case the constraint should label it input-output). */
1123 if (out != 0 && GET_CODE (out) == SUBREG
1124 && (subreg_lowpart_p (out) || strict_low)
1125 #ifdef CANNOT_CHANGE_MODE_CLASS
1126 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1127 #endif
1128 && (CONSTANT_P (SUBREG_REG (out))
1129 || strict_low
1130 || (((REG_P (SUBREG_REG (out))
1131 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1132 || MEM_P (SUBREG_REG (out)))
1133 && ((GET_MODE_SIZE (outmode)
1134 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1135 #ifdef WORD_REGISTER_OPERATIONS
1136 || ((GET_MODE_SIZE (outmode)
1137 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
1138 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1139 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1140 / UNITS_PER_WORD)))
1141 #endif
1142 ))
1143 || (REG_P (SUBREG_REG (out))
1144 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1145 && ((GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1146 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1147 > UNITS_PER_WORD)
1148 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1149 / UNITS_PER_WORD)
1150 != (int) hard_regno_nregs[REGNO (SUBREG_REG (out))]
1151 [GET_MODE (SUBREG_REG (out))]))
1152 || ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)))
1153 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1154 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1155 SUBREG_REG (out))
1156 == NO_REGS))
1157 #ifdef CANNOT_CHANGE_MODE_CLASS
1158 || (REG_P (SUBREG_REG (out))
1159 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1160 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1161 GET_MODE (SUBREG_REG (out)),
1162 outmode))
1163 #endif
1164 ))
1165 {
1166 out_subreg_loc = outloc;
1167 outloc = &SUBREG_REG (out);
1168 out = *outloc;
1169 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
1170 gcc_assert (!MEM_P (out)
1171 || GET_MODE_SIZE (GET_MODE (out))
1172 <= GET_MODE_SIZE (outmode));
1173 #endif
1174 outmode = GET_MODE (out);
1175 }
1176
1177 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R where
1178 either M1 is not valid for R or M2 is wider than a word but we only
1179 need one word to store an M2-sized quantity in R.
1180
1181 However, we must reload the inner reg *as well as* the subreg in
1182 that case. In this case, the inner reg is an in-out reload. */
1183
1184 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, 1))
1185 {
1186 /* This relies on the fact that emit_reload_insns outputs the
1187 instructions for output reloads of type RELOAD_OTHER in reverse
1188 order of the reloads. Thus if the outer reload is also of type
1189 RELOAD_OTHER, we are guaranteed that this inner reload will be
1190 output after the outer reload. */
1191 dont_remove_subreg = 1;
1192 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1193 &SUBREG_REG (out),
1194 find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1195 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1196 GET_MODE (SUBREG_REG (out)),
1197 SUBREG_BYTE (out),
1198 GET_MODE (out)),
1199 REGNO (SUBREG_REG (out))),
1200 VOIDmode, VOIDmode, 0, 0,
1201 opnum, RELOAD_OTHER);
1202 }
1203
1204 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1205 if (in != 0 && out != 0 && MEM_P (out)
1206 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1207 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1208 dont_share = 1;
1209
1210 /* If IN is a SUBREG of a hard register, make a new REG. This
1211 simplifies some of the cases below. */
1212
1213 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1214 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1215 && ! dont_remove_subreg)
1216 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1217
1218 /* Similarly for OUT. */
1219 if (out != 0 && GET_CODE (out) == SUBREG
1220 && REG_P (SUBREG_REG (out))
1221 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1222 && ! dont_remove_subreg)
1223 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1224
1225 /* Narrow down the class of register wanted if that is
1226 desirable on this machine for efficiency. */
1227 {
1228 reg_class_t preferred_class = rclass;
1229
1230 if (in != 0)
1231 preferred_class = targetm.preferred_reload_class (in, rclass);
1232
1233 /* Output reloads may need analogous treatment, different in detail. */
1234 if (out != 0)
1235 preferred_class
1236 = targetm.preferred_output_reload_class (out, preferred_class);
1237
1238 /* Discard what the target said if we cannot do it. */
1239 if (preferred_class != NO_REGS
1240 || (optional && type == RELOAD_FOR_OUTPUT))
1241 rclass = (enum reg_class) preferred_class;
1242 }
1243
1244 /* Make sure we use a class that can handle the actual pseudo
1245 inside any subreg. For example, on the 386, QImode regs
1246 can appear within SImode subregs. Although GENERAL_REGS
1247 can handle SImode, QImode needs a smaller class. */
1248 #ifdef LIMIT_RELOAD_CLASS
1249 if (in_subreg_loc)
1250 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1251 else if (in != 0 && GET_CODE (in) == SUBREG)
1252 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1253
1254 if (out_subreg_loc)
1255 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1256 if (out != 0 && GET_CODE (out) == SUBREG)
1257 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1258 #endif
1259
1260 /* Verify that this class is at least possible for the mode that
1261 is specified. */
1262 if (this_insn_is_asm)
1263 {
1264 enum machine_mode mode;
1265 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1266 mode = inmode;
1267 else
1268 mode = outmode;
1269 if (mode == VOIDmode)
1270 {
1271 error_for_asm (this_insn, "cannot reload integer constant "
1272 "operand in %<asm%>");
1273 mode = word_mode;
1274 if (in != 0)
1275 inmode = word_mode;
1276 if (out != 0)
1277 outmode = word_mode;
1278 }
1279 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1280 if (HARD_REGNO_MODE_OK (i, mode)
1281 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1282 break;
1283 if (i == FIRST_PSEUDO_REGISTER)
1284 {
1285 error_for_asm (this_insn, "impossible register constraint "
1286 "in %<asm%>");
1287 /* Avoid further trouble with this insn. */
1288 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1289 /* We used to continue here setting class to ALL_REGS, but it triggers
1290 sanity check on i386 for:
1291 void foo(long double d)
1292 {
1293 asm("" :: "a" (d));
1294 }
1295 Returning zero here ought to be safe as we take care in
1296 find_reloads to not process the reloads when instruction was
1297 replaced by USE. */
1298
1299 return 0;
1300 }
1301 }
1302
1303 /* Optional output reloads are always OK even if we have no register class,
1304 since the function of these reloads is only to have spill_reg_store etc.
1305 set, so that the storing insn can be deleted later. */
1306 gcc_assert (rclass != NO_REGS
1307 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1308
1309 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1310
1311 if (i == n_reloads)
1312 {
1313 /* See if we need a secondary reload register to move between CLASS
1314 and IN or CLASS and OUT. Get the icode and push any required reloads
1315 needed for each of them if so. */
1316
1317 if (in != 0)
1318 secondary_in_reload
1319 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1320 &secondary_in_icode, NULL);
1321 if (out != 0 && GET_CODE (out) != SCRATCH)
1322 secondary_out_reload
1323 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1324 type, &secondary_out_icode, NULL);
1325
1326 /* We found no existing reload suitable for re-use.
1327 So add an additional reload. */
1328
1329 #ifdef SECONDARY_MEMORY_NEEDED
1330 /* If a memory location is needed for the copy, make one. */
1331 if (in != 0
1332 && (REG_P (in)
1333 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1334 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER
1335 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (reg_or_subregno (in)),
1336 rclass, inmode))
1337 get_secondary_mem (in, inmode, opnum, type);
1338 #endif
1339
1340 i = n_reloads;
1341 rld[i].in = in;
1342 rld[i].out = out;
1343 rld[i].rclass = rclass;
1344 rld[i].inmode = inmode;
1345 rld[i].outmode = outmode;
1346 rld[i].reg_rtx = 0;
1347 rld[i].optional = optional;
1348 rld[i].inc = 0;
1349 rld[i].nocombine = 0;
1350 rld[i].in_reg = inloc ? *inloc : 0;
1351 rld[i].out_reg = outloc ? *outloc : 0;
1352 rld[i].opnum = opnum;
1353 rld[i].when_needed = type;
1354 rld[i].secondary_in_reload = secondary_in_reload;
1355 rld[i].secondary_out_reload = secondary_out_reload;
1356 rld[i].secondary_in_icode = secondary_in_icode;
1357 rld[i].secondary_out_icode = secondary_out_icode;
1358 rld[i].secondary_p = 0;
1359
1360 n_reloads++;
1361
1362 #ifdef SECONDARY_MEMORY_NEEDED
1363 if (out != 0
1364 && (REG_P (out)
1365 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1366 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1367 && SECONDARY_MEMORY_NEEDED (rclass,
1368 REGNO_REG_CLASS (reg_or_subregno (out)),
1369 outmode))
1370 get_secondary_mem (out, outmode, opnum, type);
1371 #endif
1372 }
1373 else
1374 {
1375 /* We are reusing an existing reload,
1376 but we may have additional information for it.
1377 For example, we may now have both IN and OUT
1378 while the old one may have just one of them. */
1379
1380 /* The modes can be different. If they are, we want to reload in
1381 the larger mode, so that the value is valid for both modes. */
1382 if (inmode != VOIDmode
1383 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1384 rld[i].inmode = inmode;
1385 if (outmode != VOIDmode
1386 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1387 rld[i].outmode = outmode;
1388 if (in != 0)
1389 {
1390 rtx in_reg = inloc ? *inloc : 0;
1391 /* If we merge reloads for two distinct rtl expressions that
1392 are identical in content, there might be duplicate address
1393 reloads. Remove the extra set now, so that if we later find
1394 that we can inherit this reload, we can get rid of the
1395 address reloads altogether.
1396
1397 Do not do this if both reloads are optional since the result
1398 would be an optional reload which could potentially leave
1399 unresolved address replacements.
1400
1401 It is not sufficient to call transfer_replacements since
1402 choose_reload_regs will remove the replacements for address
1403 reloads of inherited reloads which results in the same
1404 problem. */
1405 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1406 && ! (rld[i].optional && optional))
1407 {
1408 /* We must keep the address reload with the lower operand
1409 number alive. */
1410 if (opnum > rld[i].opnum)
1411 {
1412 remove_address_replacements (in);
1413 in = rld[i].in;
1414 in_reg = rld[i].in_reg;
1415 }
1416 else
1417 remove_address_replacements (rld[i].in);
1418 }
1419 /* When emitting reloads we don't look only at the in-
1420 and outmode, but also directly at the operands (in and out).
1421 So we can't simply overwrite them with whatever we have found
1422 for this (to-be-merged) reload; we have to "merge" that too.
1423 Reusing another reload already verified that we deal with the
1424 same operands, just possibly in different modes. So we
1425 overwrite the operands only when the new mode is larger.
1426 See also PR33613. */
1427 if (!rld[i].in
1428 || GET_MODE_SIZE (GET_MODE (in))
1429 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1430 rld[i].in = in;
1431 if (!rld[i].in_reg
1432 || (in_reg
1433 && GET_MODE_SIZE (GET_MODE (in_reg))
1434 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1435 rld[i].in_reg = in_reg;
1436 }
1437 if (out != 0)
1438 {
1439 if (!rld[i].out
1440 || (out
1441 && GET_MODE_SIZE (GET_MODE (out))
1442 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1443 rld[i].out = out;
1444 if (outloc
1445 && (!rld[i].out_reg
1446 || GET_MODE_SIZE (GET_MODE (*outloc))
1447 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1448 rld[i].out_reg = *outloc;
1449 }
1450 if (reg_class_subset_p (rclass, rld[i].rclass))
1451 rld[i].rclass = rclass;
1452 rld[i].optional &= optional;
1453 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1454 opnum, rld[i].opnum))
1455 rld[i].when_needed = RELOAD_OTHER;
1456 rld[i].opnum = MIN (rld[i].opnum, opnum);
1457 }
1458
1459 /* If the ostensible rtx being reloaded differs from the rtx found
1460 in the location to substitute, this reload is not safe to combine
1461 because we cannot reliably tell whether it appears in the insn. */
1462
1463 if (in != 0 && in != *inloc)
1464 rld[i].nocombine = 1;
1465
1466 #if 0
1467 /* This was replaced by changes in find_reloads_address_1 and the new
1468 function inc_for_reload, which go with a new meaning of reload_inc. */
1469
1470 /* If this is an IN/OUT reload in an insn that sets the CC,
1471 it must be for an autoincrement. It doesn't work to store
1472 the incremented value after the insn because that would clobber the CC.
1473 So we must do the increment of the value reloaded from,
1474 increment it, store it back, then decrement again. */
1475 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1476 {
1477 out = 0;
1478 rld[i].out = 0;
1479 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1480 /* If we did not find a nonzero amount-to-increment-by,
1481 that contradicts the belief that IN is being incremented
1482 in an address in this insn. */
1483 gcc_assert (rld[i].inc != 0);
1484 }
1485 #endif
1486
1487 /* If we will replace IN and OUT with the reload-reg,
1488 record where they are located so that substitution need
1489 not do a tree walk. */
1490
1491 if (replace_reloads)
1492 {
1493 if (inloc != 0)
1494 {
1495 struct replacement *r = &replacements[n_replacements++];
1496 r->what = i;
1497 r->subreg_loc = in_subreg_loc;
1498 r->where = inloc;
1499 r->mode = inmode;
1500 }
1501 if (outloc != 0 && outloc != inloc)
1502 {
1503 struct replacement *r = &replacements[n_replacements++];
1504 r->what = i;
1505 r->where = outloc;
1506 r->subreg_loc = out_subreg_loc;
1507 r->mode = outmode;
1508 }
1509 }
1510
1511 /* If this reload is just being introduced and it has both
1512 an incoming quantity and an outgoing quantity that are
1513 supposed to be made to match, see if either one of the two
1514 can serve as the place to reload into.
1515
1516 If one of them is acceptable, set rld[i].reg_rtx
1517 to that one. */
1518
1519 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1520 {
1521 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1522 inmode, outmode,
1523 rld[i].rclass, i,
1524 earlyclobber_operand_p (out));
1525
1526 /* If the outgoing register already contains the same value
1527 as the incoming one, we can dispense with loading it.
1528 The easiest way to tell the caller that is to give a phony
1529 value for the incoming operand (same as outgoing one). */
1530 if (rld[i].reg_rtx == out
1531 && (REG_P (in) || CONSTANT_P (in))
1532 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1533 static_reload_reg_p, i, inmode))
1534 rld[i].in = out;
1535 }
1536
1537 /* If this is an input reload and the operand contains a register that
1538 dies in this insn and is used nowhere else, see if it is the right class
1539 to be used for this reload. Use it if so. (This occurs most commonly
1540 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1541 this if it is also an output reload that mentions the register unless
1542 the output is a SUBREG that clobbers an entire register.
1543
1544 Note that the operand might be one of the spill regs, if it is a
1545 pseudo reg and we are in a block where spilling has not taken place.
1546 But if there is no spilling in this block, that is OK.
1547 An explicitly used hard reg cannot be a spill reg. */
1548
1549 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1550 {
1551 rtx note;
1552 int regno;
1553 enum machine_mode rel_mode = inmode;
1554
1555 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1556 rel_mode = outmode;
1557
1558 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1559 if (REG_NOTE_KIND (note) == REG_DEAD
1560 && REG_P (XEXP (note, 0))
1561 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1562 && reg_mentioned_p (XEXP (note, 0), in)
1563 /* Check that a former pseudo is valid; see find_dummy_reload. */
1564 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1565 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1566 ORIGINAL_REGNO (XEXP (note, 0)))
1567 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1568 && ! refers_to_regno_for_reload_p (regno,
1569 end_hard_regno (rel_mode,
1570 regno),
1571 PATTERN (this_insn), inloc)
1572 /* If this is also an output reload, IN cannot be used as
1573 the reload register if it is set in this insn unless IN
1574 is also OUT. */
1575 && (out == 0 || in == out
1576 || ! hard_reg_set_here_p (regno,
1577 end_hard_regno (rel_mode, regno),
1578 PATTERN (this_insn)))
1579 /* ??? Why is this code so different from the previous?
1580 Is there any simple coherent way to describe the two together?
1581 What's going on here? */
1582 && (in != out
1583 || (GET_CODE (in) == SUBREG
1584 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1585 / UNITS_PER_WORD)
1586 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1587 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1588 /* Make sure the operand fits in the reg that dies. */
1589 && (GET_MODE_SIZE (rel_mode)
1590 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1591 && HARD_REGNO_MODE_OK (regno, inmode)
1592 && HARD_REGNO_MODE_OK (regno, outmode))
1593 {
1594 unsigned int offs;
1595 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1596 hard_regno_nregs[regno][outmode]);
1597
1598 for (offs = 0; offs < nregs; offs++)
1599 if (fixed_regs[regno + offs]
1600 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1601 regno + offs))
1602 break;
1603
1604 if (offs == nregs
1605 && (! (refers_to_regno_for_reload_p
1606 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1607 || can_reload_into (in, regno, inmode)))
1608 {
1609 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1610 break;
1611 }
1612 }
1613 }
1614
1615 if (out)
1616 output_reloadnum = i;
1617
1618 return i;
1619 }
1620
1621 /* Record an additional place we must replace a value
1622 for which we have already recorded a reload.
1623 RELOADNUM is the value returned by push_reload
1624 when the reload was recorded.
1625 This is used in insn patterns that use match_dup. */
1626
1627 static void
1628 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1629 {
1630 if (replace_reloads)
1631 {
1632 struct replacement *r = &replacements[n_replacements++];
1633 r->what = reloadnum;
1634 r->where = loc;
1635 r->subreg_loc = 0;
1636 r->mode = mode;
1637 }
1638 }
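/* Illustrative sketch (not an actual call site): after push_reload has
   returned reload number R for some value, an additional location LOC2
   holding the same value can be tied to that reload with

       push_replacement (loc2, R, GET_MODE (*loc2));

   so that subst_reloads substitutes the reload register at LOC2 too.
   LOC2 and R here are hypothetical names used only for the example.  */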
1639
1640 /* Duplicate any replacement we have recorded to apply at
1641 location ORIG_LOC to also be performed at DUP_LOC.
1642 This is used in insn patterns that use match_dup. */
1643
1644 static void
1645 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1646 {
1647 int i, n = n_replacements;
1648
1649 for (i = 0; i < n; i++)
1650 {
1651 struct replacement *r = &replacements[i];
1652 if (r->where == orig_loc)
1653 push_replacement (dup_loc, r->what, r->mode);
1654 }
1655 }
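/* Illustrative sketch of the assumed usage: for the D-th duplicate in a
   recognized pattern, every replacement recorded at the original operand
   location can be forwarded to the duplicate with roughly

       dup_replacements (recog_data.dup_loc[d],
                         recog_data.operand_loc[(int) recog_data.dup_num[d]]);

   (field names as in recog.h; shown here only as an assumed example).  */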
1656 \f
1657 /* Transfer all replacements that used to be in reload FROM to be in
1658 reload TO. */
1659
1660 void
1661 transfer_replacements (int to, int from)
1662 {
1663 int i;
1664
1665 for (i = 0; i < n_replacements; i++)
1666 if (replacements[i].what == from)
1667 replacements[i].what = to;
1668 }
1669 \f
1670 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1671 or a subpart of it. If we have any replacements registered for IN_RTX,
1672 cancel the reloads that were supposed to load them.
1673 Return nonzero if we canceled any reloads. */
1674 int
1675 remove_address_replacements (rtx in_rtx)
1676 {
1677 int i, j;
1678 char reload_flags[MAX_RELOADS];
1679 int something_changed = 0;
1680
1681 memset (reload_flags, 0, sizeof reload_flags);
1682 for (i = 0, j = 0; i < n_replacements; i++)
1683 {
1684 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1685 reload_flags[replacements[i].what] |= 1;
1686 else
1687 {
1688 replacements[j++] = replacements[i];
1689 reload_flags[replacements[i].what] |= 2;
1690 }
1691 }
1692 /* Note that the following store must be done before the recursive calls. */
1693 n_replacements = j;
1694
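/* At this point reload_flags[r] is 1 if every replacement belonging to
   reload R was found inside IN_RTX, 2 if it has replacements but none
   inside IN_RTX, and 3 if it has some of each; only the pure case (1)
   below allows the reload to be canceled.  */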
1695 for (i = n_reloads - 1; i >= 0; i--)
1696 {
1697 if (reload_flags[i] == 1)
1698 {
1699 deallocate_reload_reg (i);
1700 remove_address_replacements (rld[i].in);
1701 rld[i].in = 0;
1702 something_changed = 1;
1703 }
1704 }
1705 return something_changed;
1706 }
1707 \f
1708 /* If there is only one output reload, and it is not for an earlyclobber
1709 operand, try to combine it with a (logically unrelated) input reload
1710 to reduce the number of reload registers needed.
1711
1712 This is safe if the input reload does not appear in
1713 the value being output-reloaded, because this implies
1714 it is not needed any more once the original insn completes.
1715
1716 If that doesn't work, see if we can use any of the registers that
1717 die in this insn as a reload register. We can if it is of the right
1718 class and does not appear in the value being output-reloaded. */
1719
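/* A purely illustrative example: in an insn such as
   (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   where neither pseudo received a hard register, the input reload of
   reg 101 and the output reload of reg 100 can usually share one reload
   register, since the value loaded for reg 101 is not part of the value
   being stored and is no longer needed once the insn completes.  */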
1720 static void
1721 combine_reloads (void)
1722 {
1723 int i, regno;
1724 int output_reload = -1;
1725 int secondary_out = -1;
1726 rtx note;
1727
1728 /* Find the output reload; return unless there is exactly one
1729 and that one is mandatory. */
1730
1731 for (i = 0; i < n_reloads; i++)
1732 if (rld[i].out != 0)
1733 {
1734 if (output_reload >= 0)
1735 return;
1736 output_reload = i;
1737 }
1738
1739 if (output_reload < 0 || rld[output_reload].optional)
1740 return;
1741
1742 /* An input-output reload isn't combinable. */
1743
1744 if (rld[output_reload].in != 0)
1745 return;
1746
1747 /* If this reload is for an earlyclobber operand, we can't do anything. */
1748 if (earlyclobber_operand_p (rld[output_reload].out))
1749 return;
1750
1751 /* If there is a reload for part of the address of this operand, we would
1752 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1753 its life to the point where doing this combine would not lower the
1754 number of spill registers needed. */
1755 for (i = 0; i < n_reloads; i++)
1756 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1757 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1758 && rld[i].opnum == rld[output_reload].opnum)
1759 return;
1760
1761 /* Check each input reload; can we combine it? */
1762
1763 for (i = 0; i < n_reloads; i++)
1764 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1765 /* Life span of this reload must not extend past main insn. */
1766 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1767 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1768 && rld[i].when_needed != RELOAD_OTHER
1769 && (CLASS_MAX_NREGS (rld[i].rclass, rld[i].inmode)
1770 == CLASS_MAX_NREGS (rld[output_reload].rclass,
1771 rld[output_reload].outmode))
1772 && rld[i].inc == 0
1773 && rld[i].reg_rtx == 0
1774 #ifdef SECONDARY_MEMORY_NEEDED
1775 /* Don't combine two reloads with different secondary
1776 memory locations. */
1777 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1778 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1779 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1780 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1781 #endif
1782 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1783 ? (rld[i].rclass == rld[output_reload].rclass)
1784 : (reg_class_subset_p (rld[i].rclass,
1785 rld[output_reload].rclass)
1786 || reg_class_subset_p (rld[output_reload].rclass,
1787 rld[i].rclass)))
1788 && (MATCHES (rld[i].in, rld[output_reload].out)
1789 /* Args reversed because the first arg seems to be
1790 the one that we imagine being modified
1791 while the second is the one that might be affected. */
1792 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1793 rld[i].in)
1794 /* However, if the input is a register that appears inside
1795 the output, then we also can't share.
1796 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1797 If the same reload reg is used for both reg 69 and the
1798 result to be stored in memory, then that result
1799 will clobber the address of the memory ref. */
1800 && ! (REG_P (rld[i].in)
1801 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1802 rld[output_reload].out))))
1803 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1804 rld[i].when_needed != RELOAD_FOR_INPUT)
1805 && (reg_class_size[(int) rld[i].rclass]
1806 || targetm.small_register_classes_for_mode_p (VOIDmode))
1807 /* We will allow making things slightly worse by combining an
1808 input and an output, but no worse than that. */
1809 && (rld[i].when_needed == RELOAD_FOR_INPUT
1810 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1811 {
1812 int j;
1813
1814 /* We have found a reload to combine with! */
1815 rld[i].out = rld[output_reload].out;
1816 rld[i].out_reg = rld[output_reload].out_reg;
1817 rld[i].outmode = rld[output_reload].outmode;
1818 /* Mark the old output reload as inoperative. */
1819 rld[output_reload].out = 0;
1820 /* The combined reload is needed for the entire insn. */
1821 rld[i].when_needed = RELOAD_OTHER;
1822 /* If the output reload had a secondary reload, copy it. */
1823 if (rld[output_reload].secondary_out_reload != -1)
1824 {
1825 rld[i].secondary_out_reload
1826 = rld[output_reload].secondary_out_reload;
1827 rld[i].secondary_out_icode
1828 = rld[output_reload].secondary_out_icode;
1829 }
1830
1831 #ifdef SECONDARY_MEMORY_NEEDED
1832 /* Copy any secondary MEM. */
1833 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1834 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1835 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1836 #endif
1837 /* If required, minimize the register class. */
1838 if (reg_class_subset_p (rld[output_reload].rclass,
1839 rld[i].rclass))
1840 rld[i].rclass = rld[output_reload].rclass;
1841
1842 /* Transfer all replacements from the old reload to the combined. */
1843 for (j = 0; j < n_replacements; j++)
1844 if (replacements[j].what == output_reload)
1845 replacements[j].what = i;
1846
1847 return;
1848 }
1849
1850 /* If this insn has only one operand that is modified or written (assumed
1851 to be the first), it must be the one corresponding to this reload. It
1852 is safe to use anything that dies in this insn for that output provided
1853 that it does not occur in the output (we already know it isn't an
1854 earlyclobber). If this is an asm insn, give up. */
1855
1856 if (INSN_CODE (this_insn) == -1)
1857 return;
1858
1859 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1860 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1861 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1862 return;
1863
1864 /* See if some hard register that dies in this insn and is not used in
1865 the output is the right class. Only works if the register we pick
1866 up can fully hold our output reload. */
1867 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1868 if (REG_NOTE_KIND (note) == REG_DEAD
1869 && REG_P (XEXP (note, 0))
1870 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1871 rld[output_reload].out)
1872 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1873 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1874 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1875 regno)
1876 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1877 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1878 /* Ensure that a secondary or tertiary reload for this output
1879 won't want this register. */
1880 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1881 || (!(TEST_HARD_REG_BIT
1882 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1883 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1884 || !(TEST_HARD_REG_BIT
1885 (reg_class_contents[(int) rld[secondary_out].rclass],
1886 regno)))))
1887 && !fixed_regs[regno]
1888 /* Check that a former pseudo is valid; see find_dummy_reload. */
1889 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1890 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
1891 ORIGINAL_REGNO (XEXP (note, 0)))
1892 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1893 {
1894 rld[output_reload].reg_rtx
1895 = gen_rtx_REG (rld[output_reload].outmode, regno);
1896 return;
1897 }
1898 }
1899 \f
1900 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1901 See if one of IN and OUT is a register that may be used;
1902 this is desirable since a spill-register won't be needed.
1903 If so, return the register rtx that proves acceptable.
1904
1905 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1906 RCLASS is the register class required for the reload.
1907
1908 If FOR_REAL is >= 0, it is the number of the reload,
1909 and in some cases when it can be discovered that OUT doesn't need
1910 to be computed, clear out rld[FOR_REAL].out.
1911
1912 If FOR_REAL is -1, this should not be done, because this call
1913 is just to see if a register can be found, not to find and install it.
1914
1915 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1916 puts an additional constraint on being able to use IN for OUT since
1917 IN must not appear elsewhere in the insn (it is assumed that IN itself
1918 is safe from the earlyclobber). */
1919
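/* Illustrative example: with IN = (reg:SI 100) (a pseudo that did not
   get a hard register) and OUT = (reg:SI 2) (a hard register), OUT can
   often serve as the reload register itself, provided hard reg 2 is in
   RCLASS, HARD_REGNO_MODE_OK accepts it for OUTMODE, and it is not
   referenced elsewhere in the insn; the REG rtx for hard reg 2 is then
   what this function returns.  */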
1920 static rtx
1921 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1922 enum machine_mode inmode, enum machine_mode outmode,
1923 reg_class_t rclass, int for_real, int earlyclobber)
1924 {
1925 rtx in = real_in;
1926 rtx out = real_out;
1927 int in_offset = 0;
1928 int out_offset = 0;
1929 rtx value = 0;
1930
1931 /* If operands exceed a word, we can't use either of them
1932 unless they have the same size. */
1933 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1934 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1935 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1936 return 0;
1937
1938 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1939 respectively refers to a hard register. */
1940
1941 /* Find the inside of any subregs. */
1942 while (GET_CODE (out) == SUBREG)
1943 {
1944 if (REG_P (SUBREG_REG (out))
1945 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1946 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1947 GET_MODE (SUBREG_REG (out)),
1948 SUBREG_BYTE (out),
1949 GET_MODE (out));
1950 out = SUBREG_REG (out);
1951 }
1952 while (GET_CODE (in) == SUBREG)
1953 {
1954 if (REG_P (SUBREG_REG (in))
1955 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1956 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1957 GET_MODE (SUBREG_REG (in)),
1958 SUBREG_BYTE (in),
1959 GET_MODE (in));
1960 in = SUBREG_REG (in);
1961 }
1962
1963 /* Narrow down the reg class, the same way push_reload will;
1964 otherwise we might find a dummy now, but push_reload won't. */
1965 {
1966 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1967 if (preferred_class != NO_REGS)
1968 rclass = (enum reg_class) preferred_class;
1969 }
1970
1971 /* See if OUT will do. */
1972 if (REG_P (out)
1973 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1974 {
1975 unsigned int regno = REGNO (out) + out_offset;
1976 unsigned int nwords = hard_regno_nregs[regno][outmode];
1977 rtx saved_rtx;
1978
1979 /* When we consider whether the insn uses OUT,
1980 ignore references within IN. They don't prevent us
1981 from copying IN into OUT, because those refs would
1982 move into the insn that reloads IN.
1983
1984 However, we only ignore IN in its role as this reload.
1985 If the insn uses IN elsewhere and it contains OUT,
1986 that counts. We can't be sure it's the "same" operand
1987 so it might not go through this reload. */
1988 saved_rtx = *inloc;
1989 *inloc = const0_rtx;
1990
1991 if (regno < FIRST_PSEUDO_REGISTER
1992 && HARD_REGNO_MODE_OK (regno, outmode)
1993 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1994 PATTERN (this_insn), outloc))
1995 {
1996 unsigned int i;
1997
1998 for (i = 0; i < nwords; i++)
1999 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2000 regno + i))
2001 break;
2002
2003 if (i == nwords)
2004 {
2005 if (REG_P (real_out))
2006 value = real_out;
2007 else
2008 value = gen_rtx_REG (outmode, regno);
2009 }
2010 }
2011
2012 *inloc = saved_rtx;
2013 }
2014
2015 /* Consider using IN if OUT was not acceptable
2016 or if OUT dies in this insn (like the quotient in a divmod insn).
2017 We can't use IN unless it dies in this insn,
2018 which means we must know accurately which hard regs are live.
2019 Also, the result can't go in IN if IN is used within OUT,
2020 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2021 if (hard_regs_live_known
2022 && REG_P (in)
2023 && REGNO (in) < FIRST_PSEUDO_REGISTER
2024 && (value == 0
2025 || find_reg_note (this_insn, REG_UNUSED, real_out))
2026 && find_reg_note (this_insn, REG_DEAD, real_in)
2027 && !fixed_regs[REGNO (in)]
2028 && HARD_REGNO_MODE_OK (REGNO (in),
2029 /* The only case where out and real_out might
2030 have different modes is where real_out
2031 is a subreg, and in that case, out
2032 has a real mode. */
2033 (GET_MODE (out) != VOIDmode
2034 ? GET_MODE (out) : outmode))
2035 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2036 /* However only do this if we can be sure that this input
2037 operand doesn't correspond with an uninitialized pseudo.
2038 global can assign some hardreg to it that is the same as
2039 the one assigned to a different, also live pseudo (as it
2040 can ignore the conflict). We must never introduce writes
2041 to such hardregs, as they would clobber the other live
2042 pseudo. See PR 20973. */
2043 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
2044 ORIGINAL_REGNO (in))
2045 /* Similarly, only do this if we can be sure that the death
2046 note is still valid. global can assign some hardreg to
2047 the pseudo referenced in the note and simultaneously a
2048 subword of this hardreg to a different, also live pseudo,
2049 because only another subword of the hardreg is actually
2050 used in the insn. This cannot happen if the pseudo has
2051 been assigned exactly one hardreg. See PR 33732. */
2052 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2053 {
2054 unsigned int regno = REGNO (in) + in_offset;
2055 unsigned int nwords = hard_regno_nregs[regno][inmode];
2056
2057 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2058 && ! hard_reg_set_here_p (regno, regno + nwords,
2059 PATTERN (this_insn))
2060 && (! earlyclobber
2061 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2062 PATTERN (this_insn), inloc)))
2063 {
2064 unsigned int i;
2065
2066 for (i = 0; i < nwords; i++)
2067 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2068 regno + i))
2069 break;
2070
2071 if (i == nwords)
2072 {
2073 /* If we were going to use OUT as the reload reg
2074 and changed our mind, it means OUT is a dummy that
2075 dies here. So don't bother copying value to it. */
2076 if (for_real >= 0 && value == real_out)
2077 rld[for_real].out = 0;
2078 if (REG_P (real_in))
2079 value = real_in;
2080 else
2081 value = gen_rtx_REG (inmode, regno);
2082 }
2083 }
2084 }
2085
2086 return value;
2087 }
2088 \f
2089 /* This page contains subroutines used mainly for determining
2090 whether the IN or an OUT of a reload can serve as the
2091 reload register. */
2092
2093 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2094
2095 int
2096 earlyclobber_operand_p (rtx x)
2097 {
2098 int i;
2099
2100 for (i = 0; i < n_earlyclobbers; i++)
2101 if (reload_earlyclobbers[i] == x)
2102 return 1;
2103
2104 return 0;
2105 }
2106
2107 /* Return 1 if expression X alters a hard reg in the range
2108 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2109 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2110 X should be the body of an instruction. */
2111
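/* For example (illustrative, assuming DImode occupies two hard registers):
   with BEG_REGNO == 3 and END_REGNO == 5, a body containing
   (set (reg:DI 4) ...) makes this return 1, because the store to hard
   reg 4 covers regs 4-5 and therefore overlaps the range 3-4.  */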
2112 static int
2113 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2114 {
2115 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2116 {
2117 rtx op0 = SET_DEST (x);
2118
2119 while (GET_CODE (op0) == SUBREG)
2120 op0 = SUBREG_REG (op0);
2121 if (REG_P (op0))
2122 {
2123 unsigned int r = REGNO (op0);
2124
2125 /* See if this reg overlaps range under consideration. */
2126 if (r < end_regno
2127 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2128 return 1;
2129 }
2130 }
2131 else if (GET_CODE (x) == PARALLEL)
2132 {
2133 int i = XVECLEN (x, 0) - 1;
2134
2135 for (; i >= 0; i--)
2136 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2137 return 1;
2138 }
2139
2140 return 0;
2141 }
2142
2143 /* Return 1 if ADDR is a valid memory address for mode MODE
2144 in address space AS, and check that each pseudo reg has the
2145 proper kind of hard reg. */
2146
2147 int
2148 strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2149 rtx addr, addr_space_t as)
2150 {
2151 #ifdef GO_IF_LEGITIMATE_ADDRESS
2152 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2153 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2154 return 0;
2155
2156 win:
2157 return 1;
2158 #else
2159 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2160 #endif
2161 }
2162 \f
2163 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2164 if they are the same hard reg, and has special hacks for
2165 autoincrement and autodecrement.
2166 This is specifically intended for find_reloads to use
2167 in determining whether two operands match.
2168 X is the operand whose number is the lower of the two.
2169
2170 The value is 2 if Y contains a pre-increment that matches
2171 a non-incrementing address in X. */
2172
2173 /* ??? To be completely correct, we should arrange to pass
2174 for X the output operand and for Y the input operand.
2175 For now, we assume that the output operand has the lower number
2176 because that is natural in (SET output (... input ...)). */
2177
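/* A few illustrative cases (a little-endian target with word-sized SImode
   is assumed): (reg:SI 3) and (subreg:SI (reg:DI 3) 0) match, since both
   resolve to hard reg 3; (post_inc:SI (reg:SI 3)) as X matches (reg:SI 3)
   as Y; and if Y is (pre_inc:SI (reg:SI 3)) matching a plain (reg:SI 3)
   in X, the value returned is 2 rather than 1.  */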
2178 int
2179 operands_match_p (rtx x, rtx y)
2180 {
2181 int i;
2182 RTX_CODE code = GET_CODE (x);
2183 const char *fmt;
2184 int success_2;
2185
2186 if (x == y)
2187 return 1;
2188 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2189 && (REG_P (y) || (GET_CODE (y) == SUBREG
2190 && REG_P (SUBREG_REG (y)))))
2191 {
2192 int j;
2193
2194 if (code == SUBREG)
2195 {
2196 i = REGNO (SUBREG_REG (x));
2197 if (i >= FIRST_PSEUDO_REGISTER)
2198 goto slow;
2199 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2200 GET_MODE (SUBREG_REG (x)),
2201 SUBREG_BYTE (x),
2202 GET_MODE (x));
2203 }
2204 else
2205 i = REGNO (x);
2206
2207 if (GET_CODE (y) == SUBREG)
2208 {
2209 j = REGNO (SUBREG_REG (y));
2210 if (j >= FIRST_PSEUDO_REGISTER)
2211 goto slow;
2212 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2213 GET_MODE (SUBREG_REG (y)),
2214 SUBREG_BYTE (y),
2215 GET_MODE (y));
2216 }
2217 else
2218 j = REGNO (y);
2219
2220 /* On a WORDS_BIG_ENDIAN machine, point to the last register of a
2221 multi-register group of scalar integer registers, so that
2222 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2223 register. */
2224 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2225 && SCALAR_INT_MODE_P (GET_MODE (x))
2226 && i < FIRST_PSEUDO_REGISTER)
2227 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2228 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2229 && SCALAR_INT_MODE_P (GET_MODE (y))
2230 && j < FIRST_PSEUDO_REGISTER)
2231 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2232
2233 return i == j;
2234 }
2235 /* If two operands must match, because they are really a single
2236 operand of an assembler insn, then two postincrements are invalid
2237 because the assembler insn would increment only once.
2238 On the other hand, a postincrement matches ordinary indexing
2239 if the postincrement is the output operand. */
2240 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2241 return operands_match_p (XEXP (x, 0), y);
2242 /* Two preincrements are invalid
2243 because the assembler insn would increment only once.
2244 On the other hand, a preincrement matches ordinary indexing
2245 if the preincrement is the input operand.
2246 In this case, return 2, since some callers need to do special
2247 things when this happens. */
2248 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2249 || GET_CODE (y) == PRE_MODIFY)
2250 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2251
2252 slow:
2253
2254 /* Now we have disposed of all the cases in which different rtx codes
2255 can match. */
2256 if (code != GET_CODE (y))
2257 return 0;
2258
2259 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2260 if (GET_MODE (x) != GET_MODE (y))
2261 return 0;
2262
2263 /* MEMs referring to different address spaces are not equivalent. */
2264 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2265 return 0;
2266
2267 switch (code)
2268 {
2269 case CONST_INT:
2270 case CONST_DOUBLE:
2271 case CONST_FIXED:
2272 return 0;
2273
2274 case LABEL_REF:
2275 return XEXP (x, 0) == XEXP (y, 0);
2276 case SYMBOL_REF:
2277 return XSTR (x, 0) == XSTR (y, 0);
2278
2279 default:
2280 break;
2281 }
2282
2283 /* Compare the elements. If any pair of corresponding elements
2284 fails to match, return 0 for the whole thing. */
2285
2286 success_2 = 0;
2287 fmt = GET_RTX_FORMAT (code);
2288 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2289 {
2290 int val, j;
2291 switch (fmt[i])
2292 {
2293 case 'w':
2294 if (XWINT (x, i) != XWINT (y, i))
2295 return 0;
2296 break;
2297
2298 case 'i':
2299 if (XINT (x, i) != XINT (y, i))
2300 return 0;
2301 break;
2302
2303 case 'e':
2304 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2305 if (val == 0)
2306 return 0;
2307 /* If any subexpression returns 2,
2308 we should return 2 if we are successful. */
2309 if (val == 2)
2310 success_2 = 1;
2311 break;
2312
2313 case '0':
2314 break;
2315
2316 case 'E':
2317 if (XVECLEN (x, i) != XVECLEN (y, i))
2318 return 0;
2319 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2320 {
2321 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2322 if (val == 0)
2323 return 0;
2324 if (val == 2)
2325 success_2 = 1;
2326 }
2327 break;
2328
2329 /* It is believed that rtx's at this level will never
2330 contain anything but integers and other rtx's,
2331 except within LABEL_REFs and SYMBOL_REFs. */
2332 default:
2333 gcc_unreachable ();
2334 }
2335 }
2336 return 1 + success_2;
2337 }
2338 \f
2339 /* Describe the range of registers or memory referenced by X.
2340 If X is a register, set REG_FLAG and put the first register
2341 number into START and the last plus one into END.
2342 If X is a memory reference, put a base address into BASE
2343 and a range of integer offsets into START and END.
2344 If X is pushing on the stack, we can assume it causes no trouble,
2345 so we set the SAFE field. */
2346
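/* Illustrative results (assuming SImode is 4 bytes and fits in one hard
   register): decomposing (mem:SI (plus:SI (reg:SI fp) (const_int 8)))
   yields base = (reg:SI fp), start = 8, end = 12; decomposing a hard
   register (reg:SI 6) yields reg_flag = 1, start = 6, end = 7; and a push
   such as (mem:SI (pre_dec:SI (reg:SI sp))) is marked safe, its base
   being the stack pointer.  */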
2347 static struct decomposition
2348 decompose (rtx x)
2349 {
2350 struct decomposition val;
2351 int all_const = 0;
2352
2353 memset (&val, 0, sizeof (val));
2354
2355 switch (GET_CODE (x))
2356 {
2357 case MEM:
2358 {
2359 rtx base = NULL_RTX, offset = 0;
2360 rtx addr = XEXP (x, 0);
2361
2362 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2363 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2364 {
2365 val.base = XEXP (addr, 0);
2366 val.start = -GET_MODE_SIZE (GET_MODE (x));
2367 val.end = GET_MODE_SIZE (GET_MODE (x));
2368 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2369 return val;
2370 }
2371
2372 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2373 {
2374 if (GET_CODE (XEXP (addr, 1)) == PLUS
2375 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2376 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2377 {
2378 val.base = XEXP (addr, 0);
2379 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2380 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2381 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2382 return val;
2383 }
2384 }
2385
2386 if (GET_CODE (addr) == CONST)
2387 {
2388 addr = XEXP (addr, 0);
2389 all_const = 1;
2390 }
2391 if (GET_CODE (addr) == PLUS)
2392 {
2393 if (CONSTANT_P (XEXP (addr, 0)))
2394 {
2395 base = XEXP (addr, 1);
2396 offset = XEXP (addr, 0);
2397 }
2398 else if (CONSTANT_P (XEXP (addr, 1)))
2399 {
2400 base = XEXP (addr, 0);
2401 offset = XEXP (addr, 1);
2402 }
2403 }
2404
2405 if (offset == 0)
2406 {
2407 base = addr;
2408 offset = const0_rtx;
2409 }
2410 if (GET_CODE (offset) == CONST)
2411 offset = XEXP (offset, 0);
2412 if (GET_CODE (offset) == PLUS)
2413 {
2414 if (CONST_INT_P (XEXP (offset, 0)))
2415 {
2416 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2417 offset = XEXP (offset, 0);
2418 }
2419 else if (CONST_INT_P (XEXP (offset, 1)))
2420 {
2421 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2422 offset = XEXP (offset, 1);
2423 }
2424 else
2425 {
2426 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2427 offset = const0_rtx;
2428 }
2429 }
2430 else if (!CONST_INT_P (offset))
2431 {
2432 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2433 offset = const0_rtx;
2434 }
2435
2436 if (all_const && GET_CODE (base) == PLUS)
2437 base = gen_rtx_CONST (GET_MODE (base), base);
2438
2439 gcc_assert (CONST_INT_P (offset));
2440
2441 val.start = INTVAL (offset);
2442 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2443 val.base = base;
2444 }
2445 break;
2446
2447 case REG:
2448 val.reg_flag = 1;
2449 val.start = true_regnum (x);
2450 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2451 {
2452 /* A pseudo with no hard reg. */
2453 val.start = REGNO (x);
2454 val.end = val.start + 1;
2455 }
2456 else
2457 /* A hard reg. */
2458 val.end = end_hard_regno (GET_MODE (x), val.start);
2459 break;
2460
2461 case SUBREG:
2462 if (!REG_P (SUBREG_REG (x)))
2463 /* This could be more precise, but it's good enough. */
2464 return decompose (SUBREG_REG (x));
2465 val.reg_flag = 1;
2466 val.start = true_regnum (x);
2467 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2468 return decompose (SUBREG_REG (x));
2469 else
2470 /* A hard reg. */
2471 val.end = val.start + subreg_nregs (x);
2472 break;
2473
2474 case SCRATCH:
2475 /* This hasn't been assigned yet, so it can't conflict yet. */
2476 val.safe = 1;
2477 break;
2478
2479 default:
2480 gcc_assert (CONSTANT_P (x));
2481 val.safe = 1;
2482 break;
2483 }
2484 return val;
2485 }
2486
2487 /* Return 1 if altering Y will not modify the value of X.
2488 Y is also described by YDATA, which should be the result of decompose (Y). */
2489
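/* For example (illustrative, SImode assumed 4 bytes wide): two frame
   slots such as (mem:SI (plus:SI (reg:SI fp) (const_int -4))) and
   (mem:SI (plus:SI (reg:SI fp) (const_int -8))) share the same base,
   and their byte ranges [-4,0) and [-8,-4) do not overlap, so altering
   one cannot change the other and immune_p returns 1.  */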
2490 static int
2491 immune_p (rtx x, rtx y, struct decomposition ydata)
2492 {
2493 struct decomposition xdata;
2494
2495 if (ydata.reg_flag)
2496 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2497 if (ydata.safe)
2498 return 1;
2499
2500 gcc_assert (MEM_P (y));
2501 /* If Y is memory and X is not, Y can't affect X. */
2502 if (!MEM_P (x))
2503 return 1;
2504
2505 xdata = decompose (x);
2506
2507 if (! rtx_equal_p (xdata.base, ydata.base))
2508 {
2509 /* If bases are distinct symbolic constants, there is no overlap. */
2510 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2511 return 1;
2512 /* Constants and stack slots never overlap. */
2513 if (CONSTANT_P (xdata.base)
2514 && (ydata.base == frame_pointer_rtx
2515 || ydata.base == hard_frame_pointer_rtx
2516 || ydata.base == stack_pointer_rtx))
2517 return 1;
2518 if (CONSTANT_P (ydata.base)
2519 && (xdata.base == frame_pointer_rtx
2520 || xdata.base == hard_frame_pointer_rtx
2521 || xdata.base == stack_pointer_rtx))
2522 return 1;
2523 /* If either base is variable, we don't know anything. */
2524 return 0;
2525 }
2526
2527 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2528 }
2529
2530 /* Similar, but calls decompose. */
2531
2532 int
2533 safe_from_earlyclobber (rtx op, rtx clobber)
2534 {
2535 struct decomposition early_data;
2536
2537 early_data = decompose (clobber);
2538 return immune_p (op, clobber, early_data);
2539 }
2540 \f
2541 /* Main entry point of this file: search the body of INSN
2542 for values that need reloading and record them with push_reload.
2543 REPLACE nonzero means record also where the values occur
2544 so that subst_reloads can be used.
2545
2546 IND_LEVELS says how many levels of indirection are supported by this
2547 machine; a value of zero means that a memory reference is not a valid
2548 memory address.
2549
2550 LIVE_KNOWN says we have valid information about which hard
2551 regs are live at each point in the program; this is true when
2552 we are called from global_alloc but false when stupid register
2553 allocation has been done.
2554
2555 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2556 an entry is nonnegative if that reg has been commandeered for reloading into.
2557 It is copied into STATIC_RELOAD_REG_P and referenced from there
2558 by various subroutines.
2559
2560 Return TRUE if some operands need to be changed, because of swapping
2561 commutative operands, reg_equiv_address substitution, or whatever. */
2562
2563 int
2564 find_reloads (rtx insn, int replace, int ind_levels, int live_known,
2565 short *reload_reg_p)
2566 {
2567 int insn_code_number;
2568 int i, j;
2569 int noperands;
2570 /* These start out as the constraints for the insn
2571 and they are chewed up as we consider alternatives. */
2572 const char *constraints[MAX_RECOG_OPERANDS];
2573 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2574 a register. */
2575 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2576 char pref_or_nothing[MAX_RECOG_OPERANDS];
2577 /* Nonzero for a MEM operand whose entire address needs a reload.
2578 May be -1 to indicate the entire address may or may not need a reload. */
2579 int address_reloaded[MAX_RECOG_OPERANDS];
2580 /* Nonzero for an address operand that needs to be completely reloaded.
2581 May be -1 to indicate the entire operand may or may not need a reload. */
2582 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2583 /* Value of enum reload_type to use for operand. */
2584 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2585 /* Value of enum reload_type to use within address of operand. */
2586 enum reload_type address_type[MAX_RECOG_OPERANDS];
2587 /* Save the usage of each operand. */
2588 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2589 int no_input_reloads = 0, no_output_reloads = 0;
2590 int n_alternatives;
2591 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2592 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2593 char this_alternative_win[MAX_RECOG_OPERANDS];
2594 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2595 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2596 int this_alternative_matches[MAX_RECOG_OPERANDS];
2597 int swapped;
2598 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2599 int this_alternative_number;
2600 int goal_alternative_number = 0;
2601 int operand_reloadnum[MAX_RECOG_OPERANDS];
2602 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2603 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2604 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2605 char goal_alternative_win[MAX_RECOG_OPERANDS];
2606 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2607 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2608 int goal_alternative_swapped;
2609 int best;
2610 int commutative;
2611 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2612 rtx substed_operand[MAX_RECOG_OPERANDS];
2613 rtx body = PATTERN (insn);
2614 rtx set = single_set (insn);
2615 int goal_earlyclobber = 0, this_earlyclobber;
2616 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2617 int retval = 0;
2618
2619 this_insn = insn;
2620 n_reloads = 0;
2621 n_replacements = 0;
2622 n_earlyclobbers = 0;
2623 replace_reloads = replace;
2624 hard_regs_live_known = live_known;
2625 static_reload_reg_p = reload_reg_p;
2626
2627 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2628 neither are insns that SET cc0. Insns that use CC0 are not allowed
2629 to have any input reloads. */
2630 if (JUMP_P (insn) || CALL_P (insn))
2631 no_output_reloads = 1;
2632
2633 #ifdef HAVE_cc0
2634 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2635 no_input_reloads = 1;
2636 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2637 no_output_reloads = 1;
2638 #endif
2639
2640 #ifdef SECONDARY_MEMORY_NEEDED
2641 /* The eliminated forms of any secondary memory locations are per-insn, so
2642 clear them out here. */
2643
2644 if (secondary_memlocs_elim_used)
2645 {
2646 memset (secondary_memlocs_elim, 0,
2647 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2648 secondary_memlocs_elim_used = 0;
2649 }
2650 #endif
2651
2652 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2653 is cheap to move between them. If it is not, there may not be an insn
2654 to do the copy, so we may need a reload. */
2655 if (GET_CODE (body) == SET
2656 && REG_P (SET_DEST (body))
2657 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2658 && REG_P (SET_SRC (body))
2659 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2660 && register_move_cost (GET_MODE (SET_SRC (body)),
2661 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2662 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2663 return 0;
2664
2665 extract_insn (insn);
2666
2667 noperands = reload_n_operands = recog_data.n_operands;
2668 n_alternatives = recog_data.n_alternatives;
2669
2670 /* Just return "no reloads" if insn has no operands with constraints. */
2671 if (noperands == 0 || n_alternatives == 0)
2672 return 0;
2673
2674 insn_code_number = INSN_CODE (insn);
2675 this_insn_is_asm = insn_code_number < 0;
2676
2677 memcpy (operand_mode, recog_data.operand_mode,
2678 noperands * sizeof (enum machine_mode));
2679 memcpy (constraints, recog_data.constraints,
2680 noperands * sizeof (const char *));
2681
2682 commutative = -1;
2683
2684 /* If we will need to know, later, whether some pair of operands
2685 are the same, we must compare them now and save the result.
2686 Reloading the base and index registers will clobber them
2687 and afterward they will fail to match. */
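/* For example (illustrative): with constraints "=r" for operand 0 and
   "0" for operand 1, the digit '0' below causes operands_match[0][1] to
   be computed and saved here, before any address reloads can rewrite
   either operand and spoil the comparison.  */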
2688
2689 for (i = 0; i < noperands; i++)
2690 {
2691 const char *p;
2692 int c;
2693 char *end;
2694
2695 substed_operand[i] = recog_data.operand[i];
2696 p = constraints[i];
2697
2698 modified[i] = RELOAD_READ;
2699
2700 /* Scan this operand's constraint to see if it is an output operand,
2701 an in-out operand, is commutative, or should match another. */
2702
2703 while ((c = *p))
2704 {
2705 p += CONSTRAINT_LEN (c, p);
2706 switch (c)
2707 {
2708 case '=':
2709 modified[i] = RELOAD_WRITE;
2710 break;
2711 case '+':
2712 modified[i] = RELOAD_READ_WRITE;
2713 break;
2714 case '%':
2715 {
2716 /* The last operand should not be marked commutative. */
2717 gcc_assert (i != noperands - 1);
2718
2719 /* We currently only support one commutative pair of
2720 operands. Some existing asm code currently uses more
2721 than one pair. Previously, that would usually work,
2722 but sometimes it would crash the compiler. We
2723 continue supporting that case as well as we can by
2724 silently ignoring all but the first pair. In the
2725 future we may handle it correctly. */
2726 if (commutative < 0)
2727 commutative = i;
2728 else
2729 gcc_assert (this_insn_is_asm);
2730 }
2731 break;
2732 /* Use of ISDIGIT is tempting here, but it may get expensive because
2733 of locale support we don't want. */
2734 case '0': case '1': case '2': case '3': case '4':
2735 case '5': case '6': case '7': case '8': case '9':
2736 {
2737 c = strtoul (p - 1, &end, 10);
2738 p = end;
2739
2740 operands_match[c][i]
2741 = operands_match_p (recog_data.operand[c],
2742 recog_data.operand[i]);
2743
2744 /* An operand may not match itself. */
2745 gcc_assert (c != i);
2746
2747 /* If C can be commuted with C+1, and C might need to match I,
2748 then C+1 might also need to match I. */
2749 if (commutative >= 0)
2750 {
2751 if (c == commutative || c == commutative + 1)
2752 {
2753 int other = c + (c == commutative ? 1 : -1);
2754 operands_match[other][i]
2755 = operands_match_p (recog_data.operand[other],
2756 recog_data.operand[i]);
2757 }
2758 if (i == commutative || i == commutative + 1)
2759 {
2760 int other = i + (i == commutative ? 1 : -1);
2761 operands_match[c][other]
2762 = operands_match_p (recog_data.operand[c],
2763 recog_data.operand[other]);
2764 }
2765 /* Note that C is supposed to be less than I.
2766 No need to consider altering both C and I because in
2767 that case we would alter one into the other. */
2768 }
2769 }
2770 }
2771 }
2772 }
2773
2774 /* Examine each operand that is a memory reference or memory address
2775 and reload parts of the addresses into index registers.
2776 Also here any references to pseudo regs that didn't get hard regs
2777 but are equivalent to constants get replaced in the insn itself
2778 with those constants. Nobody will ever see them again.
2779
2780 Finally, set up the preferred classes of each operand. */
2781
2782 for (i = 0; i < noperands; i++)
2783 {
2784 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2785
2786 address_reloaded[i] = 0;
2787 address_operand_reloaded[i] = 0;
2788 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2789 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2790 : RELOAD_OTHER);
2791 address_type[i]
2792 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2793 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2794 : RELOAD_OTHER);
2795
2796 if (*constraints[i] == 0)
2797 /* Ignore things like match_operator operands. */
2798 ;
2799 else if (constraints[i][0] == 'p'
2800 || EXTRA_ADDRESS_CONSTRAINT (constraints[i][0], constraints[i]))
2801 {
2802 address_operand_reloaded[i]
2803 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2804 recog_data.operand[i],
2805 recog_data.operand_loc[i],
2806 i, operand_type[i], ind_levels, insn);
2807
2808 /* If we now have a simple operand where we used to have a
2809 PLUS or MULT, re-recognize and try again. */
2810 if ((OBJECT_P (*recog_data.operand_loc[i])
2811 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2812 && (GET_CODE (recog_data.operand[i]) == MULT
2813 || GET_CODE (recog_data.operand[i]) == PLUS))
2814 {
2815 INSN_CODE (insn) = -1;
2816 retval = find_reloads (insn, replace, ind_levels, live_known,
2817 reload_reg_p);
2818 return retval;
2819 }
2820
2821 recog_data.operand[i] = *recog_data.operand_loc[i];
2822 substed_operand[i] = recog_data.operand[i];
2823
2824 /* Address operands are reloaded in their existing mode,
2825 no matter what is specified in the machine description. */
2826 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2827 }
2828 else if (code == MEM)
2829 {
2830 address_reloaded[i]
2831 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2832 recog_data.operand_loc[i],
2833 XEXP (recog_data.operand[i], 0),
2834 &XEXP (recog_data.operand[i], 0),
2835 i, address_type[i], ind_levels, insn);
2836 recog_data.operand[i] = *recog_data.operand_loc[i];
2837 substed_operand[i] = recog_data.operand[i];
2838 }
2839 else if (code == SUBREG)
2840 {
2841 rtx reg = SUBREG_REG (recog_data.operand[i]);
2842 rtx op
2843 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2844 ind_levels,
2845 set != 0
2846 && &SET_DEST (set) == recog_data.operand_loc[i],
2847 insn,
2848 &address_reloaded[i]);
2849
2850 /* If we made a MEM to load (a part of) the stack slot of a pseudo
2851 that didn't get a hard register, emit a USE with a REG_EQUAL
2852 note in front so that we might inherit a previous, possibly
2853 wider reload. */
2854
2855 if (replace
2856 && MEM_P (op)
2857 && REG_P (reg)
2858 && (GET_MODE_SIZE (GET_MODE (reg))
2859 >= GET_MODE_SIZE (GET_MODE (op)))
2860 && reg_equiv_constant (REGNO (reg)) == 0)
2861 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2862 insn),
2863 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2864
2865 substed_operand[i] = recog_data.operand[i] = op;
2866 }
2867 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2868 /* We can get a PLUS as an "operand" as a result of register
2869 elimination. See eliminate_regs and gen_reload. We handle
2870 a unary operator by reloading the operand. */
2871 substed_operand[i] = recog_data.operand[i]
2872 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2873 ind_levels, 0, insn,
2874 &address_reloaded[i]);
2875 else if (code == REG)
2876 {
2877 /* This is equivalent to calling find_reloads_toplev.
2878 The code is duplicated for speed.
2879 When we find a pseudo always equivalent to a constant,
2880 we replace it by the constant. We must be sure, however,
2881 that we don't try to replace it in the insn in which it
2882 is being set. */
2883 int regno = REGNO (recog_data.operand[i]);
2884 if (reg_equiv_constant (regno) != 0
2885 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2886 {
2887 /* Record the existing mode so that the check if constants are
2888 allowed will work when operand_mode isn't specified. */
2889
2890 if (operand_mode[i] == VOIDmode)
2891 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2892
2893 substed_operand[i] = recog_data.operand[i]
2894 = reg_equiv_constant (regno);
2895 }
2896 if (reg_equiv_memory_loc (regno) != 0
2897 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2898 /* We need not give a valid is_set_dest argument since the case
2899 of a constant equivalence was checked above. */
2900 substed_operand[i] = recog_data.operand[i]
2901 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2902 ind_levels, 0, insn,
2903 &address_reloaded[i]);
2904 }
2905 /* If the operand is still a register (we didn't replace it with an
2906 equivalent), get the preferred class to reload it into. */
2907 code = GET_CODE (recog_data.operand[i]);
2908 preferred_class[i]
2909 = ((code == REG && REGNO (recog_data.operand[i])
2910 >= FIRST_PSEUDO_REGISTER)
2911 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2912 : NO_REGS);
2913 pref_or_nothing[i]
2914 = (code == REG
2915 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2916 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2917 }
2918
2919 /* If this is simply a copy from operand 1 to operand 0, merge the
2920 preferred classes for the operands. */
2921 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2922 && recog_data.operand[1] == SET_SRC (set))
2923 {
2924 preferred_class[0] = preferred_class[1]
2925 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2926 pref_or_nothing[0] |= pref_or_nothing[1];
2927 pref_or_nothing[1] |= pref_or_nothing[0];
2928 }
2929
2930 /* Now see what we need for pseudo-regs that didn't get hard regs
2931 or got the wrong kind of hard reg. For this, we must consider
2932 all the operands together against the register constraints. */
2933
2934 best = MAX_RECOG_OPERANDS * 2 + 600;
2935
2936 swapped = 0;
2937 goal_alternative_swapped = 0;
2938 try_swapped:
2939
2940 /* The constraints are made of several alternatives.
2941 Each operand's constraint looks like foo,bar,... with commas
2942 separating the alternatives. The first alternatives for all
2943 operands go together, the second alternatives go together, etc.
2944
2945 First loop over alternatives. */
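/* For instance (illustrative): constraints "r,m" for operand 0 and
   "m,r" for operand 1 describe two alternatives, (reg, mem) and
   (mem, reg); each pass of this loop scores one such column of
   constraints across all operands.  */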
2946
2947 for (this_alternative_number = 0;
2948 this_alternative_number < n_alternatives;
2949 this_alternative_number++)
2950 {
2951 /* Loop over operands for one constraint alternative. */
2952 /* LOSERS counts those that don't fit this alternative
2953 and would require loading. */
2954 int losers = 0;
2955 /* BAD is set to 1 if some operand can't fit this alternative
2956 even after reloading. */
2957 int bad = 0;
2958 /* REJECT is a count of how undesirable this alternative says it is
2959 if any reloading is required. If the alternative matches exactly
2960 then REJECT is ignored, but otherwise it gets this much
2961 counted against it in addition to the reloading needed. Each
2962 ? counts three times here since we want the disparaging caused by
2963 a bad register class to only count 1/3 as much. */
2964 int reject = 0;
2965
2966 if (!recog_data.alternative_enabled_p[this_alternative_number])
2967 {
2968 int i;
2969
2970 for (i = 0; i < recog_data.n_operands; i++)
2971 constraints[i] = skip_alternative (constraints[i]);
2972
2973 continue;
2974 }
2975
2976 this_earlyclobber = 0;
2977
2978 for (i = 0; i < noperands; i++)
2979 {
2980 const char *p = constraints[i];
2981 char *end;
2982 int len;
2983 int win = 0;
2984 int did_match = 0;
2985 /* 0 => this operand can be reloaded somehow for this alternative. */
2986 int badop = 1;
2987 /* 0 => this operand can be reloaded if the alternative allows regs. */
2988 int winreg = 0;
2989 int c;
2990 int m;
2991 rtx operand = recog_data.operand[i];
2992 int offset = 0;
2993 /* Nonzero means this is a MEM that must be reloaded into a reg
2994 regardless of what the constraint says. */
2995 int force_reload = 0;
2996 int offmemok = 0;
2997 /* Nonzero if a constant forced into memory would be OK for this
2998 operand. */
2999 int constmemok = 0;
3000 int earlyclobber = 0;
3001
3002 /* If the predicate accepts a unary operator, it means that
3003 we need to reload the operand, but do not do this for
3004 match_operator and friends. */
3005 if (UNARY_P (operand) && *p != 0)
3006 operand = XEXP (operand, 0);
3007
3008 /* If the operand is a SUBREG, extract
3009 the REG or MEM (or maybe even a constant) within.
3010 (Constants can occur as a result of reg_equiv_constant.) */
3011
3012 while (GET_CODE (operand) == SUBREG)
3013 {
3014 /* The offset only matters when the operand is a REG and
3015 it is a hard reg. This is because the offset is passed
3016 to reg_fits_class_p only for a hard REG, and that function
3017 returns 0 for all pseudos. */
3018 if (REG_P (SUBREG_REG (operand))
3019 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3020 {
3021 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3022 GET_MODE (SUBREG_REG (operand)),
3023 SUBREG_BYTE (operand),
3024 GET_MODE (operand)) < 0)
3025 force_reload = 1;
3026 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3027 GET_MODE (SUBREG_REG (operand)),
3028 SUBREG_BYTE (operand),
3029 GET_MODE (operand));
3030 }
3031 operand = SUBREG_REG (operand);
3032 /* Force reload if this is a constant or PLUS or if there may
3033 be a problem accessing OPERAND in the outer mode. */
3034 if (CONSTANT_P (operand)
3035 || GET_CODE (operand) == PLUS
3036 /* We must force a reload of paradoxical SUBREGs
3037 of a MEM because the alignment of the inner value
3038 may not be enough to do the outer reference. On
3039 big-endian machines, it may also reference outside
3040 the object.
3041
3042 On machines that extend byte operations, if we have a
3043 SUBREG where both the inner and outer modes are no wider
3044 than a word and the inner mode is narrower, integral,
3045 and gets extended when loaded from memory, combine.c has
3046 made assumptions about the behavior of the machine in such
3047 register accesses. If the data is, in fact, in memory we
3048 must always load using the size assumed to be in the
3049 register and let the insn do the different-sized
3050 accesses.
3051
3052 This is doubly true if WORD_REGISTER_OPERATIONS. In
3053 this case eliminate_regs has left non-paradoxical
3054 subregs for push_reload to see. Make sure it does
3055 by forcing the reload.
3056
3057 ??? When is it right at this stage to have a subreg
3058 of a mem that is _not_ to be handled specially? IMO
3059 those should have been reduced to just a mem. */
3060 || ((MEM_P (operand)
3061 || (REG_P (operand)
3062 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3063 #ifndef WORD_REGISTER_OPERATIONS
3064 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3065 < BIGGEST_ALIGNMENT)
3066 && (GET_MODE_SIZE (operand_mode[i])
3067 > GET_MODE_SIZE (GET_MODE (operand))))
3068 || BYTES_BIG_ENDIAN
3069 #ifdef LOAD_EXTEND_OP
3070 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3071 && (GET_MODE_SIZE (GET_MODE (operand))
3072 <= UNITS_PER_WORD)
3073 && (GET_MODE_SIZE (operand_mode[i])
3074 > GET_MODE_SIZE (GET_MODE (operand)))
3075 && INTEGRAL_MODE_P (GET_MODE (operand))
3076 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3077 #endif
3078 )
3079 #endif
3080 )
3081 )
3082 force_reload = 1;
3083 }
3084
3085 this_alternative[i] = NO_REGS;
3086 this_alternative_win[i] = 0;
3087 this_alternative_match_win[i] = 0;
3088 this_alternative_offmemok[i] = 0;
3089 this_alternative_earlyclobber[i] = 0;
3090 this_alternative_matches[i] = -1;
3091
3092 /* An empty constraint or empty alternative
3093 allows anything which matched the pattern. */
3094 if (*p == 0 || *p == ',')
3095 win = 1, badop = 0;
3096
3097 /* Scan this alternative's specs for this operand;
3098 set WIN if the operand fits any letter in this alternative.
3099 Otherwise, clear BADOP if this operand could
3100 fit some letter after reloads,
3101 or set WINREG if this operand could fit after reloads
3102 provided the constraint allows some registers. */
3103
3104 do
3105 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3106 {
3107 case '\0':
3108 len = 0;
3109 break;
3110 case ',':
3111 c = '\0';
3112 break;
3113
3114 case '=': case '+': case '*':
3115 break;
3116
3117 case '%':
3118 /* We only support one commutative marker, the first
3119 one. We already set commutative above. */
3120 break;
3121
3122 case '?':
3123 reject += 6;
3124 break;
3125
3126 case '!':
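/* The value 600 dwarfs the per-reload weight of 6 used in
   "losers = losers * 6 + reject" below, so an alternative carrying
   '!' should only ever be chosen when nothing cheaper can be made
   to work with reloads.  */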
3127 reject = 600;
3128 break;
3129
3130 case '#':
3131 /* Ignore rest of this alternative as far as
3132 reloading is concerned. */
3133 do
3134 p++;
3135 while (*p && *p != ',');
3136 len = 0;
3137 break;
3138
3139 case '0': case '1': case '2': case '3': case '4':
3140 case '5': case '6': case '7': case '8': case '9':
3141 m = strtoul (p, &end, 10);
3142 p = end;
3143 len = 0;
3144
3145 this_alternative_matches[i] = m;
3146 /* We are supposed to match a previous operand.
3147 If we do, we win if that one did.
3148 If we do not, count both of the operands as losers.
3149 (This is too conservative, since most of the time
3150 only a single reload insn will be needed to make
3151 the two operands win. As a result, this alternative
3152 may be rejected when it is actually desirable.) */
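/* As a concrete illustration: the constraint "0" on operand 1
   gives M == 0 here, requiring operand 1 to match operand 0;
   roughly speaking, operand 1 then counts as winning only when
   operands_match[0][1] is set and operand 0 itself won.  */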
3153 if ((swapped && (m != commutative || i != commutative + 1))
3154 /* If we are matching as if two operands were swapped,
3155 also pretend that operands_match had been computed
3156 with swapped.
3157 But if I is the second of those and C is the first,
3158 don't exchange them, because operands_match is valid
3159 only on one side of its diagonal. */
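/* For example, with COMMUTATIVE == 0 the index
   2 * commutative + 1 - m maps 0 to 1 and 1 to 0, while any
   other operand number fails the == tests and is left alone.  */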
3160 ? (operands_match
3161 [(m == commutative || m == commutative + 1)
3162 ? 2 * commutative + 1 - m : m]
3163 [(i == commutative || i == commutative + 1)
3164 ? 2 * commutative + 1 - i : i])
3165 : operands_match[m][i])
3166 {
3167 /* If we are matching a non-offsettable address where an
3168 offsettable address was expected, then we must reject
3169 this combination, because we can't reload it. */
3170 if (this_alternative_offmemok[m]
3171 && MEM_P (recog_data.operand[m])
3172 && this_alternative[m] == NO_REGS
3173 && ! this_alternative_win[m])
3174 bad = 1;
3175
3176 did_match = this_alternative_win[m];
3177 }
3178 else
3179 {
3180 /* Operands don't match. */
3181 rtx value;
3182 int loc1, loc2;
3183 /* Retroactively mark the operand we had to match
3184 as a loser, if it wasn't already. */
3185 if (this_alternative_win[m])
3186 losers++;
3187 this_alternative_win[m] = 0;
3188 if (this_alternative[m] == NO_REGS)
3189 bad = 1;
3190 /* But count the pair only once in the total badness of
3191 this alternative, if the pair can be a dummy reload.
3192 The pointers in operand_loc are not swapped; swap
3193 them by hand if necessary. */
3194 if (swapped && i == commutative)
3195 loc1 = commutative + 1;
3196 else if (swapped && i == commutative + 1)
3197 loc1 = commutative;
3198 else
3199 loc1 = i;
3200 if (swapped && m == commutative)
3201 loc2 = commutative + 1;
3202 else if (swapped && m == commutative + 1)
3203 loc2 = commutative;
3204 else
3205 loc2 = m;
3206 value
3207 = find_dummy_reload (recog_data.operand[i],
3208 recog_data.operand[m],
3209 recog_data.operand_loc[loc1],
3210 recog_data.operand_loc[loc2],
3211 operand_mode[i], operand_mode[m],
3212 this_alternative[m], -1,
3213 this_alternative_earlyclobber[m]);
3214
3215 if (value != 0)
3216 losers--;
3217 }
3218 /* This can be fixed with reloads if the operand
3219 we are supposed to match can be fixed with reloads. */
3220 badop = 0;
3221 this_alternative[i] = this_alternative[m];
3222
3223 /* If we have to reload this operand and some previous
3224 operand also had to match the same thing as this
3225 operand, we don't know how to do that. So reject this
3226 alternative. */
3227 if (! did_match || force_reload)
3228 for (j = 0; j < i; j++)
3229 if (this_alternative_matches[j]
3230 == this_alternative_matches[i])
3231 badop = 1;
3232 break;
3233
3234 case 'p':
3235 /* All necessary reloads for an address_operand
3236 were handled in find_reloads_address. */
3237 this_alternative[i] = base_reg_class (VOIDmode, ADDRESS,
3238 SCRATCH);
3239 win = 1;
3240 badop = 0;
3241 break;
3242
3243 case TARGET_MEM_CONSTRAINT:
3244 if (force_reload)
3245 break;
3246 if (MEM_P (operand)
3247 || (REG_P (operand)
3248 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3249 && reg_renumber[REGNO (operand)] < 0))
3250 win = 1;
3251 if (CONST_POOL_OK_P (operand_mode[i], operand))
3252 badop = 0;
3253 constmemok = 1;
3254 break;
3255
3256 case '<':
3257 if (MEM_P (operand)
3258 && ! address_reloaded[i]
3259 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3260 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3261 win = 1;
3262 break;
3263
3264 case '>':
3265 if (MEM_P (operand)
3266 && ! address_reloaded[i]
3267 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3268 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3269 win = 1;
3270 break;
3271
3272 /* Memory operand whose address is not offsettable. */
3273 case 'V':
3274 if (force_reload)
3275 break;
3276 if (MEM_P (operand)
3277 && ! (ind_levels ? offsettable_memref_p (operand)
3278 : offsettable_nonstrict_memref_p (operand))
3279 /* Certain mem addresses will become offsettable
3280 after they themselves are reloaded. This is important;
3281 we don't want our own handling of unoffsettables
3282 to override the handling of reg_equiv_address. */
3283 && !(REG_P (XEXP (operand, 0))
3284 && (ind_levels == 0
3285 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3286 win = 1;
3287 break;
3288
3289 /* Memory operand whose address is offsettable. */
3290 case 'o':
3291 if (force_reload)
3292 break;
3293 if ((MEM_P (operand)
3294 /* If IND_LEVELS, find_reloads_address won't reload a
3295 pseudo that didn't get a hard reg, so we have to
3296 reject that case. */
3297 && ((ind_levels ? offsettable_memref_p (operand)
3298 : offsettable_nonstrict_memref_p (operand))
3299 /* A reloaded address is offsettable because it is now
3300 just a simple register indirect. */
3301 || address_reloaded[i] == 1))
3302 || (REG_P (operand)
3303 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3304 && reg_renumber[REGNO (operand)] < 0
3305 /* If reg_equiv_address is nonzero, we will be
3306 loading it into a register; hence it will be
3307 offsettable, but we cannot say that reg_equiv_mem
3308 is offsettable without checking. */
3309 && ((reg_equiv_mem (REGNO (operand)) != 0
3310 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3311 || (reg_equiv_address (REGNO (operand)) != 0))))
3312 win = 1;
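/* Even when the operand is not offsettable as it stands, it can
   usually be fixed up: a constant can be spilled to the constant
   pool and a MEM's address can be reloaded, so record both
   possibilities below.  */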
3313 if (CONST_POOL_OK_P (operand_mode[i], operand)
3314 || MEM_P (operand))
3315 badop = 0;
3316 constmemok = 1;
3317 offmemok = 1;
3318 break;
3319
3320 case '&':
3321 /* Output operand that is stored before the need for the
3322 input operands (and their index registers) is over. */
3323 earlyclobber = 1, this_earlyclobber = 1;
3324 break;
3325
3326 case 'E':
3327 case 'F':
3328 if (GET_CODE (operand) == CONST_DOUBLE
3329 || (GET_CODE (operand) == CONST_VECTOR
3330 && (GET_MODE_CLASS (GET_MODE (operand))
3331 == MODE_VECTOR_FLOAT)))
3332 win = 1;
3333 break;
3334
3335 case 'G':
3336 case 'H':
3337 if (GET_CODE (operand) == CONST_DOUBLE
3338 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (operand, c, p))
3339 win = 1;
3340 break;
3341
3342 case 's':
3343 if (CONST_INT_P (operand)
3344 || (GET_CODE (operand) == CONST_DOUBLE
3345 && GET_MODE (operand) == VOIDmode))
3346 break;
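/* Deliberate fall through to 'i': 's' accepts constants whose
   value is not an explicit integer, so explicit integers broke
   out above without setting WIN; anything else gets the general
   constant test below.  */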
3347 case 'i':
3348 if (CONSTANT_P (operand)
3349 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (operand)))
3350 win = 1;
3351 break;
3352
3353 case 'n':
3354 if (CONST_INT_P (operand)
3355 || (GET_CODE (operand) == CONST_DOUBLE
3356 && GET_MODE (operand) == VOIDmode))
3357 win = 1;
3358 break;
3359
3360 case 'I':
3361 case 'J':
3362 case 'K':
3363 case 'L':
3364 case 'M':
3365 case 'N':
3366 case 'O':
3367 case 'P':
3368 if (CONST_INT_P (operand)
3369 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (operand), c, p))
3370 win = 1;
3371 break;
3372
3373 case 'X':
3374 force_reload = 0;
3375 win = 1;
3376 break;
3377
3378 case 'g':
3379 if (! force_reload
3380 /* A PLUS is never a valid operand, but reload can make
3381 it from a register when eliminating registers. */
3382 && GET_CODE (operand) != PLUS
3383 /* A SCRATCH is not a valid operand. */
3384 && GET_CODE (operand) != SCRATCH
3385 && (! CONSTANT_P (operand)
3386 || ! flag_pic
3387 || LEGITIMATE_PIC_OPERAND_P (operand))
3388 && (GENERAL_REGS == ALL_REGS
3389 || !REG_P (operand)
3390 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3391 && reg_renumber[REGNO (operand)] < 0)))
3392 win = 1;
3393 /* Drop through into 'r' case. */
3394
3395 case 'r':
3396 this_alternative[i]
3397 = reg_class_subunion[this_alternative[i]][(int) GENERAL_REGS];
3398 goto reg;
3399
3400 default:
3401 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS)
3402 {
3403 #ifdef EXTRA_CONSTRAINT_STR
3404 if (EXTRA_MEMORY_CONSTRAINT (c, p))
3405 {
3406 if (force_reload)
3407 break;
3408 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3409 win = 1;
3410 /* If the address was already reloaded,
3411 we win as well. */
3412 else if (MEM_P (operand)
3413 && address_reloaded[i] == 1)
3414 win = 1;
3415 /* Likewise if the address will be reloaded because
3416 reg_equiv_address is nonzero. For reg_equiv_mem
3417 we have to check. */
3418 else if (REG_P (operand)
3419 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3420 && reg_renumber[REGNO (operand)] < 0
3421 && ((reg_equiv_mem (REGNO (operand)) != 0
3422 && EXTRA_CONSTRAINT_STR (reg_equiv_mem (REGNO (operand)), c, p))
3423 || (reg_equiv_address (REGNO (operand)) != 0)))
3424 win = 1;
3425
3426 /* If we didn't already win, we can reload
3427 constants via force_const_mem, and other
3428 MEMs by reloading the address like for 'o'. */
3429 if (CONST_POOL_OK_P (operand_mode[i], operand)
3430 || MEM_P (operand))
3431 badop = 0;
3432 constmemok = 1;
3433 offmemok = 1;
3434 break;
3435 }
3436 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
3437 {
3438 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3439 win = 1;
3440
3441 /* If we didn't already win, we can reload
3442 the address into a base register. */
3443 this_alternative[i] = base_reg_class (VOIDmode,
3444 ADDRESS,
3445 SCRATCH);
3446 badop = 0;
3447 break;
3448 }
3449
3450 if (EXTRA_CONSTRAINT_STR (operand, c, p))
3451 win = 1;
3452 #endif
3453 break;
3454 }
3455
3456 this_alternative[i]
3457 = (reg_class_subunion
3458 [this_alternative[i]]
3459 [(int) REG_CLASS_FROM_CONSTRAINT (c, p)]);
3460 reg:
3461 if (GET_MODE (operand) == BLKmode)
3462 break;
3463 winreg = 1;
3464 if (REG_P (operand)
3465 && reg_fits_class_p (operand, this_alternative[i],
3466 offset, GET_MODE (recog_data.operand[i])))
3467 win = 1;
3468 break;
3469 }
3470 while ((p += len), c);
3471
3472 constraints[i] = p;
3473
3474 /* If this operand could be handled with a reg,
3475 and some reg is allowed, then this operand can be handled. */
3476 if (winreg && this_alternative[i] != NO_REGS
3477 && (win || !class_only_fixed_regs[this_alternative[i]]))
3478 badop = 0;
3479
3480 /* Record which operands fit this alternative. */
3481 this_alternative_earlyclobber[i] = earlyclobber;
3482 if (win && ! force_reload)
3483 this_alternative_win[i] = 1;
3484 else if (did_match && ! force_reload)
3485 this_alternative_match_win[i] = 1;
3486 else
3487 {
3488 int const_to_mem = 0;
3489
3490 this_alternative_offmemok[i] = offmemok;
3491 losers++;
3492 if (badop)
3493 bad = 1;
3494 /* Alternative loses if it has no regs for a reg operand. */
3495 if (REG_P (operand)
3496 && this_alternative[i] == NO_REGS
3497 && this_alternative_matches[i] < 0)
3498 bad = 1;
3499
3500 /* If this is a constant that is reloaded into the desired
3501 class by copying it to memory first, count that as another
3502 reload. This is consistent with other code and is
3503 required to avoid choosing another alternative when
3504 the constant is moved into memory by this function on
3505 an early reload pass. Note that the test here is
3506 precisely the same as in the code below that calls
3507 force_const_mem. */
3508 if (CONST_POOL_OK_P (operand_mode[i], operand)
3509 && ((targetm.preferred_reload_class (operand,
3510 this_alternative[i])
3511 == NO_REGS)
3512 || no_input_reloads))
3513 {
3514 const_to_mem = 1;
3515 if (this_alternative[i] != NO_REGS)
3516 losers++;
3517 }
3518
3519 /* Alternative loses if it requires a type of reload not
3520 permitted for this insn. We can always reload SCRATCH
3521 and objects with a REG_UNUSED note. */
3522 if (GET_CODE (operand) != SCRATCH
3523 && modified[i] != RELOAD_READ && no_output_reloads
3524 && ! find_reg_note (insn, REG_UNUSED, operand))
3525 bad = 1;
3526 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3527 && ! const_to_mem)
3528 bad = 1;
3529
3530 /* If we can't reload this value at all, reject this
3531 alternative. Note that we could also lose due to
3532 LIMIT_RELOAD_CLASS, but we don't check that
3533 here. */
3534
3535 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3536 {
3537 if (targetm.preferred_reload_class (operand, this_alternative[i])
3538 == NO_REGS)
3539 reject = 600;
3540
3541 if (operand_type[i] == RELOAD_FOR_OUTPUT
3542 && (targetm.preferred_output_reload_class (operand,
3543 this_alternative[i])
3544 == NO_REGS))
3545 reject = 600;
3546 }
3547
3548 /* We prefer to reload pseudos over reloading other things,
3549 since such reloads may be able to be eliminated later.
3550 If we are reloading a SCRATCH, we won't be generating any
3551 insns, just using a register, so it is also preferred.
3552 So bump REJECT in other cases. Don't do this in the
3553 case where we are forcing a constant into memory and
3554 it will then win, since we don't want a different
3555 alternative to match in that case. */
3556 if (! (REG_P (operand)
3557 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3558 && GET_CODE (operand) != SCRATCH
3559 && ! (const_to_mem && constmemok))
3560 reject += 2;
3561
3562 /* Input reloads can be inherited more often than output
3563 reloads can be removed, so penalize output reloads. */
3564 if (operand_type[i] != RELOAD_FOR_INPUT
3565 && GET_CODE (operand) != SCRATCH)
3566 reject++;
3567 }
3568
3569 /* If this operand is a pseudo register that didn't get a hard
3570 reg and this alternative accepts some register, see if the
3571 class that we want is a subset of the preferred class for this
3572 register. If not, but it intersects that class, use the
3573 preferred class instead. If it does not intersect the preferred
3574 class, show that usage of this alternative should be discouraged;
3575 it will be discouraged more still if the register is `preferred
3576 or nothing'. We do this because it increases the chance of
3577 reusing our spill register in a later insn and avoiding a pair
3578 of memory stores and loads.
3579
3580 Don't bother with this if this alternative will accept this
3581 operand.
3582
3583 Don't do this for a multiword operand, since it is only a
3584 small win and has the risk of requiring more spill registers,
3585 which could cause a large loss.
3586
3587 Don't do this if the preferred class has only one register
3588 because we might otherwise exhaust the class. */
3589
3590 if (! win && ! did_match
3591 && this_alternative[i] != NO_REGS
3592 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3593 && reg_class_size [(int) preferred_class[i]] > 0
3594 && ! small_register_class_p (preferred_class[i]))
3595 {
3596 if (! reg_class_subset_p (this_alternative[i],
3597 preferred_class[i]))
3598 {
3599 /* Since we don't have a way of forming the intersection,
3600 we just do something special if the preferred class
3601 is a subset of the class we have; that's the most
3602 common case anyway. */
3603 if (reg_class_subset_p (preferred_class[i],
3604 this_alternative[i]))
3605 this_alternative[i] = preferred_class[i];
3606 else
3607 reject += (2 + 2 * pref_or_nothing[i]);
3608 }
3609 }
3610 }
3611
3612 /* Now see if any output operands that are marked "earlyclobber"
3613 in this alternative conflict with any input operands
3614 or any memory addresses. */
3615
3616 for (i = 0; i < noperands; i++)
3617 if (this_alternative_earlyclobber[i]
3618 && (this_alternative_win[i] || this_alternative_match_win[i]))
3619 {
3620 struct decomposition early_data;
3621
3622 early_data = decompose (recog_data.operand[i]);
3623
3624 gcc_assert (modified[i] != RELOAD_READ);
3625
3626 if (this_alternative[i] == NO_REGS)
3627 {
3628 this_alternative_earlyclobber[i] = 0;
3629 gcc_assert (this_insn_is_asm);
3630 error_for_asm (this_insn,
3631 "%<&%> constraint used with no register class");
3632 }
3633
3634 for (j = 0; j < noperands; j++)
3635 /* Is this an input operand or a memory ref? */
3636 if ((MEM_P (recog_data.operand[j])
3637 || modified[j] != RELOAD_WRITE)
3638 && j != i
3639 /* Ignore things like match_operator operands. */
3640 && !recog_data.is_operator[j]
3641 /* Don't count an input operand that is constrained to match
3642 the early clobber operand. */
3643 && ! (this_alternative_matches[j] == i
3644 && rtx_equal_p (recog_data.operand[i],
3645 recog_data.operand[j]))
3646 /* Is it altered by storing the earlyclobber operand? */
3647 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3648 early_data))
3649 {
3650 /* If the output is in a non-empty few-regs class,
3651 it's costly to reload it, so reload the input instead. */
3652 if (small_register_class_p (this_alternative[i])
3653 && (REG_P (recog_data.operand[j])
3654 || GET_CODE (recog_data.operand[j]) == SUBREG))
3655 {
3656 losers++;
3657 this_alternative_win[j] = 0;
3658 this_alternative_match_win[j] = 0;
3659 }
3660 else
3661 break;
3662 }
3663 /* If an earlyclobber operand conflicts with something,
3664 it must be reloaded, so request this and count the cost. */
3665 if (j != noperands)
3666 {
3667 losers++;
3668 this_alternative_win[i] = 0;
3669 this_alternative_match_win[j] = 0;
3670 for (j = 0; j < noperands; j++)
3671 if (this_alternative_matches[j] == i
3672 && this_alternative_match_win[j])
3673 {
3674 this_alternative_win[j] = 0;
3675 this_alternative_match_win[j] = 0;
3676 losers++;
3677 }
3678 }
3679 }
3680
3681 /* If one alternative accepts all the operands, no reload required,
3682 choose that alternative; don't consider the remaining ones. */
3683 if (losers == 0)
3684 {
3685 /* Unswap these so that they are never swapped at `finish'. */
3686 if (commutative >= 0)
3687 {
3688 recog_data.operand[commutative] = substed_operand[commutative];
3689 recog_data.operand[commutative + 1]
3690 = substed_operand[commutative + 1];
3691 }
3692 for (i = 0; i < noperands; i++)
3693 {
3694 goal_alternative_win[i] = this_alternative_win[i];
3695 goal_alternative_match_win[i] = this_alternative_match_win[i];
3696 goal_alternative[i] = this_alternative[i];
3697 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3698 goal_alternative_matches[i] = this_alternative_matches[i];
3699 goal_alternative_earlyclobber[i]
3700 = this_alternative_earlyclobber[i];
3701 }
3702 goal_alternative_number = this_alternative_number;
3703 goal_alternative_swapped = swapped;
3704 goal_earlyclobber = this_earlyclobber;
3705 goto finish;
3706 }
3707
3708 /* REJECT, set by the ! and ? constraint characters and when a register
3709 would be reloaded into a non-preferred class, discourages the use of
3710 this alternative for a reload goal. REJECT is incremented by six for
3711 each ?, by at least two for each non-preferred class, and is set to 600 by !. */
3712 losers = losers * 6 + reject;
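/* With these weights, each '?' costs the same as one additional
   reload (6), the small per-operand penalties added above act as
   tie-breakers, and the 600 used for '!' or an un-reloadable class
   effectively disqualifies the alternative unless nothing cheaper
   can be reloaded at all.  */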
3713
3714 /* If this alternative can be made to work by reloading,
3715 and it needs less reloading than the others checked so far,
3716 record it as the chosen goal for reloading. */
3717 if (! bad)
3718 {
3719 if (best > losers)
3720 {
3721 for (i = 0; i < noperands; i++)
3722 {
3723 goal_alternative[i] = this_alternative[i];
3724 goal_alternative_win[i] = this_alternative_win[i];
3725 goal_alternative_match_win[i]
3726 = this_alternative_match_win[i];
3727 goal_alternative_offmemok[i]
3728 = this_alternative_offmemok[i];
3729 goal_alternative_matches[i] = this_alternative_matches[i];
3730 goal_alternative_earlyclobber[i]
3731 = this_alternative_earlyclobber[i];
3732 }
3733 goal_alternative_swapped = swapped;
3734 best = losers;
3735 goal_alternative_number = this_alternative_number;
3736 goal_earlyclobber = this_earlyclobber;
3737 }
3738 }
3739 }
3740
3741 /* If insn is commutative (it's safe to exchange a certain pair of operands)
3742 then we need to try each alternative twice,
3743 the second time matching those two operands
3744 as if we had exchanged them.
3745 To do this, really exchange them in operands.
3746
3747 If we have just tried the alternatives the second time,
3748 return operands to normal and drop through. */
3749
3750 if (commutative >= 0)
3751 {
3752 swapped = !swapped;
3753 if (swapped)
3754 {
3755 enum reg_class tclass;
3756 int t;
3757
3758 recog_data.operand[commutative] = substed_operand[commutative + 1];
3759 recog_data.operand[commutative + 1] = substed_operand[commutative];
3760 /* Swap the duplicates too. */
3761 for (i = 0; i < recog_data.n_dups; i++)
3762 if (recog_data.dup_num[i] == commutative
3763 || recog_data.dup_num[i] == commutative + 1)
3764 *recog_data.dup_loc[i]
3765 = recog_data.operand[(int) recog_data.dup_num[i]];
3766
3767 tclass = preferred_class[commutative];
3768 preferred_class[commutative] = preferred_class[commutative + 1];
3769 preferred_class[commutative + 1] = tclass;
3770
3771 t = pref_or_nothing[commutative];
3772 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3773 pref_or_nothing[commutative + 1] = t;
3774
3775 t = address_reloaded[commutative];
3776 address_reloaded[commutative] = address_reloaded[commutative + 1];
3777 address_reloaded[commutative + 1] = t;
3778
3779 memcpy (constraints, recog_data.constraints,
3780 noperands * sizeof (const char *));
3781 goto try_swapped;
3782 }
3783 else
3784 {
3785 recog_data.operand[commutative] = substed_operand[commutative];
3786 recog_data.operand[commutative + 1]
3787 = substed_operand[commutative + 1];
3788 /* Unswap the duplicates too. */
3789 for (i = 0; i < recog_data.n_dups; i++)
3790 if (recog_data.dup_num[i] == commutative
3791 || recog_data.dup_num[i] == commutative + 1)
3792 *recog_data.dup_loc[i]
3793 = recog_data.operand[(int) recog_data.dup_num[i]];
3794 }
3795 }
3796
3797 /* The operands don't meet the constraints.
3798 goal_alternative describes the alternative
3799 that we could reach by reloading the fewest operands.
3800 Reload so as to fit it. */
3801
3802 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3803 {
3804 /* No alternative works with reloads?? */
3805 if (insn_code_number >= 0)
3806 fatal_insn ("unable to generate reloads for:", insn);
3807 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3808 /* Avoid further trouble with this insn. */
3809 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3810 n_reloads = 0;
3811 return 0;
3812 }
3813
3814 /* Jump to `finish' from above if all operands are valid already.
3815 In that case, goal_alternative_win is all 1. */
3816 finish:
3817
3818 /* Right now, for any pair of operands I and J that are required to match,
3819 with I < J,
3820 goal_alternative_matches[J] is I.
3821 Set up goal_alternative_matched as the inverse function:
3822 goal_alternative_matched[I] = J. */
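/* For example, if operand 2 had to match operand 0, then
   goal_alternative_matches[2] == 0 and the loop below records
   goal_alternative_matched[0] = 2, but only when operand 2 did
   not already win on its own.  */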
3823
3824 for (i = 0; i < noperands; i++)
3825 goal_alternative_matched[i] = -1;
3826
3827 for (i = 0; i < noperands; i++)
3828 if (! goal_alternative_win[i]
3829 && goal_alternative_matches[i] >= 0)
3830 goal_alternative_matched[goal_alternative_matches[i]] = i;
3831
3832 for (i = 0; i < noperands; i++)
3833 goal_alternative_win[i] |= goal_alternative_match_win[i];
3834
3835 /* If the best alternative is with operands 1 and 2 swapped,
3836 consider them swapped before reporting the reloads. Update the
3837 operand numbers of any reloads already pushed. */
3838
3839 if (goal_alternative_swapped)
3840 {
3841 rtx tem;
3842
3843 tem = substed_operand[commutative];
3844 substed_operand[commutative] = substed_operand[commutative + 1];
3845 substed_operand[commutative + 1] = tem;
3846 tem = recog_data.operand[commutative];
3847 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3848 recog_data.operand[commutative + 1] = tem;
3849 tem = *recog_data.operand_loc[commutative];
3850 *recog_data.operand_loc[commutative]
3851 = *recog_data.operand_loc[commutative + 1];
3852 *recog_data.operand_loc[commutative + 1] = tem;
3853
3854 for (i = 0; i < n_reloads; i++)
3855 {
3856 if (rld[i].opnum == commutative)
3857 rld[i].opnum = commutative + 1;
3858 else if (rld[i].opnum == commutative + 1)
3859 rld[i].opnum = commutative;
3860 }
3861 }
3862
3863 for (i = 0; i < noperands; i++)
3864 {
3865 operand_reloadnum[i] = -1;
3866
3867 /* If this is an earlyclobber operand, we need to widen the scope.
3868 The reload must remain valid from the start of the insn being
3869 reloaded until after the operand is stored into its destination.
3870 We approximate this with RELOAD_OTHER even though we know that we
3871 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3872
3873 One special case that is worth checking is when we have an
3874 output that is earlyclobber but isn't used past the insn (typically
3875 a SCRATCH). In this case, we need only keep the reload live
3876 through the insn itself, not for any of our input or output
3877 reloads.
3878 But we must not accidentally narrow the scope of an existing
3879 RELOAD_OTHER reload - leave these alone.
3880
3881 In any case, anything needed to address this operand can remain
3882 however it was previously categorized. */
3883
3884 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3885 operand_type[i]
3886 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3887 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3888 }
3889
3890 /* Any constants that aren't allowed and can't be reloaded
3891 into registers are here changed into memory references. */
3892 for (i = 0; i < noperands; i++)
3893 if (! goal_alternative_win[i])
3894 {
3895 rtx op = recog_data.operand[i];
3896 rtx subreg = NULL_RTX;
3897 rtx plus = NULL_RTX;
3898 enum machine_mode mode = operand_mode[i];
3899
3900 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3901 push_reload so we have to let them pass here. */
3902 if (GET_CODE (op) == SUBREG)
3903 {
3904 subreg = op;
3905 op = SUBREG_REG (op);
3906 mode = GET_MODE (op);
3907 }
3908
3909 if (GET_CODE (op) == PLUS)
3910 {
3911 plus = op;
3912 op = XEXP (op, 1);
3913 }
3914
3915 if (CONST_POOL_OK_P (mode, op)
3916 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3917 == NO_REGS)
3918 || no_input_reloads))
3919 {
3920 int this_address_reloaded;
3921 rtx tem = force_const_mem (mode, op);
3922
3923 /* If we stripped a SUBREG or a PLUS above, add it back. */
3924 if (plus != NULL_RTX)
3925 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3926
3927 if (subreg != NULL_RTX)
3928 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3929
3930 this_address_reloaded = 0;
3931 substed_operand[i] = recog_data.operand[i]
3932 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3933 0, insn, &this_address_reloaded);
3934
3935 /* If the alternative accepts constant pool refs directly
3936 there will be no reload needed at all. */
3937 if (plus == NULL_RTX
3938 && subreg == NULL_RTX
3939 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3940 ? substed_operand[i]
3941 : NULL,
3942 recog_data.constraints[i],
3943 goal_alternative_number))
3944 goal_alternative_win[i] = 1;
3945 }
3946 }
3947
3948 /* Record the values of the earlyclobber operands for the caller. */
3949 if (goal_earlyclobber)
3950 for (i = 0; i < noperands; i++)
3951 if (goal_alternative_earlyclobber[i])
3952 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3953
3954 /* Now record reloads for all the operands that need them. */
3955 for (i = 0; i < noperands; i++)
3956 if (! goal_alternative_win[i])
3957 {
3958 /* Operands that match previous ones have already been handled. */
3959 if (goal_alternative_matches[i] >= 0)
3960 ;
3961 /* Handle an operand with a nonoffsettable address
3962 appearing where an offsettable address will do
3963 by reloading the address into a base register.
3964
3965 ??? We can also do this when the operand is a register and
3966 reg_equiv_mem is not offsettable, but this is a bit tricky,
3967 so we don't bother with it. It may not be worth doing. */
3968 else if (goal_alternative_matched[i] == -1
3969 && goal_alternative_offmemok[i]
3970 && MEM_P (recog_data.operand[i]))
3971 {
3972 /* If the address to be reloaded is a VOIDmode constant,
3973 use the default address mode as mode of the reload register,
3974 as would have been done by find_reloads_address. */
3975 enum machine_mode address_mode;
3976 address_mode = GET_MODE (XEXP (recog_data.operand[i], 0));
3977 if (address_mode == VOIDmode)
3978 {
3979 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3980 address_mode = targetm.addr_space.address_mode (as);
3981 }
3982
3983 operand_reloadnum[i]
3984 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3985 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3986 base_reg_class (VOIDmode, MEM, SCRATCH),
3987 address_mode,
3988 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3989 rld[operand_reloadnum[i]].inc
3990 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3991
3992 /* If this operand is an output, we will have made any
3993 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3994 now we are treating part of the operand as an input, so
3995 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3996
3997 if (modified[i] == RELOAD_WRITE)
3998 {
3999 for (j = 0; j < n_reloads; j++)
4000 {
4001 if (rld[j].opnum == i)
4002 {
4003 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4004 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4005 else if (rld[j].when_needed
4006 == RELOAD_FOR_OUTADDR_ADDRESS)
4007 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4008 }
4009 }
4010 }
4011 }
4012 else if (goal_alternative_matched[i] == -1)
4013 {
4014 operand_reloadnum[i]
4015 = push_reload ((modified[i] != RELOAD_WRITE
4016 ? recog_data.operand[i] : 0),
4017 (modified[i] != RELOAD_READ
4018 ? recog_data.operand[i] : 0),
4019 (modified[i] != RELOAD_WRITE
4020 ? recog_data.operand_loc[i] : 0),
4021 (modified[i] != RELOAD_READ
4022 ? recog_data.operand_loc[i] : 0),
4023 (enum reg_class) goal_alternative[i],
4024 (modified[i] == RELOAD_WRITE
4025 ? VOIDmode : operand_mode[i]),
4026 (modified[i] == RELOAD_READ
4027 ? VOIDmode : operand_mode[i]),
4028 (insn_code_number < 0 ? 0
4029 : insn_data[insn_code_number].operand[i].strict_low),
4030 0, i, operand_type[i]);
4031 }
4032 /* In a matching pair of operands, one must be input only
4033 and the other must be output only.
4034 Pass the input operand as IN and the other as OUT. */
4035 else if (modified[i] == RELOAD_READ
4036 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4037 {
4038 operand_reloadnum[i]
4039 = push_reload (recog_data.operand[i],
4040 recog_data.operand[goal_alternative_matched[i]],
4041 recog_data.operand_loc[i],
4042 recog_data.operand_loc[goal_alternative_matched[i]],
4043 (enum reg_class) goal_alternative[i],
4044 operand_mode[i],
4045 operand_mode[goal_alternative_matched[i]],
4046 0, 0, i, RELOAD_OTHER);
4047 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4048 }
4049 else if (modified[i] == RELOAD_WRITE
4050 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4051 {
4052 operand_reloadnum[goal_alternative_matched[i]]
4053 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4054 recog_data.operand[i],
4055 recog_data.operand_loc[goal_alternative_matched[i]],
4056 recog_data.operand_loc[i],
4057 (enum reg_class) goal_alternative[i],
4058 operand_mode[goal_alternative_matched[i]],
4059 operand_mode[i],
4060 0, 0, i, RELOAD_OTHER);
4061 operand_reloadnum[i] = output_reloadnum;
4062 }
4063 else
4064 {
4065 gcc_assert (insn_code_number < 0);
4066 error_for_asm (insn, "inconsistent operand constraints "
4067 "in an %<asm%>");
4068 /* Avoid further trouble with this insn. */
4069 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4070 n_reloads = 0;
4071 return 0;
4072 }
4073 }
4074 else if (goal_alternative_matched[i] < 0
4075 && goal_alternative_matches[i] < 0
4076 && address_operand_reloaded[i] != 1
4077 && optimize)
4078 {
4079 /* For each non-matching operand that's a MEM or a pseudo-register
4080 that didn't get a hard register, make an optional reload.
4081 This may get done even if the insn needs no reloads otherwise. */
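/* (Optional reloads are only performed when a suitable spare
   register turns out to be available; otherwise they are simply
   dropped, so requesting one here should cost nothing when
   registers are tight.)  */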
4082
4083 rtx operand = recog_data.operand[i];
4084
4085 while (GET_CODE (operand) == SUBREG)
4086 operand = SUBREG_REG (operand);
4087 if ((MEM_P (operand)
4088 || (REG_P (operand)
4089 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4090 /* If this is only for an output, the optional reload would not
4091 actually cause us to use a register now; it would just note that
4092 something is stored here. */
4093 && (goal_alternative[i] != NO_REGS
4094 || modified[i] == RELOAD_WRITE)
4095 && ! no_input_reloads
4096 /* An optional output reload might allow us to delete INSN later.
4097 We mustn't make in-out reloads on insns that are not permitted
4098 to have output reloads.
4099 If this is an asm, we can't delete it; we must not even call
4100 push_reload for an optional output reload in this case,
4101 because we can't be sure that the constraint allows a register,
4102 and push_reload verifies the constraints for asms. */
4103 && (modified[i] == RELOAD_READ
4104 || (! no_output_reloads && ! this_insn_is_asm)))
4105 operand_reloadnum[i]
4106 = push_reload ((modified[i] != RELOAD_WRITE
4107 ? recog_data.operand[i] : 0),
4108 (modified[i] != RELOAD_READ
4109 ? recog_data.operand[i] : 0),
4110 (modified[i] != RELOAD_WRITE
4111 ? recog_data.operand_loc[i] : 0),
4112 (modified[i] != RELOAD_READ
4113 ? recog_data.operand_loc[i] : 0),
4114 (enum reg_class) goal_alternative[i],
4115 (modified[i] == RELOAD_WRITE
4116 ? VOIDmode : operand_mode[i]),
4117 (modified[i] == RELOAD_READ
4118 ? VOIDmode : operand_mode[i]),
4119 (insn_code_number < 0 ? 0
4120 : insn_data[insn_code_number].operand[i].strict_low),
4121 1, i, operand_type[i]);
4122 /* If a memory reference remains (either as a MEM or a pseudo that
4123 did not get a hard register), yet we can't make an optional
4124 reload, check if this is actually a pseudo register reference;
4125 we then need to emit a USE and/or a CLOBBER so that reload
4126 inheritance will do the right thing. */
4127 else if (replace
4128 && (MEM_P (operand)
4129 || (REG_P (operand)
4130 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4131 && reg_renumber [REGNO (operand)] < 0)))
4132 {
4133 operand = *recog_data.operand_loc[i];
4134
4135 while (GET_CODE (operand) == SUBREG)
4136 operand = SUBREG_REG (operand);
4137 if (REG_P (operand))
4138 {
4139 if (modified[i] != RELOAD_WRITE)
4140 /* We mark the USE with QImode so that we recognize
4141 it as one that can be safely deleted at the end
4142 of reload. */
4143 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4144 insn), QImode);
4145 if (modified[i] != RELOAD_READ)
4146 emit_insn_after (gen_clobber (operand), insn);
4147 }
4148 }
4149 }
4150 else if (goal_alternative_matches[i] >= 0
4151 && goal_alternative_win[goal_alternative_matches[i]]
4152 && modified[i] == RELOAD_READ
4153 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4154 && ! no_input_reloads && ! no_output_reloads
4155 && optimize)
4156 {
4157 /* Similarly, make an optional reload for a pair of matching
4158 objects that are in MEM or a pseudo that didn't get a hard reg. */
4159
4160 rtx operand = recog_data.operand[i];
4161
4162 while (GET_CODE (operand) == SUBREG)
4163 operand = SUBREG_REG (operand);
4164 if ((MEM_P (operand)
4165 || (REG_P (operand)
4166 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4167 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4168 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4169 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4170 recog_data.operand[i],
4171 recog_data.operand_loc[goal_alternative_matches[i]],
4172 recog_data.operand_loc[i],
4173 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4174 operand_mode[goal_alternative_matches[i]],
4175 operand_mode[i],
4176 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4177 }
4178
4179 /* Perform whatever substitutions on the operands we are supposed
4180 to make due to commutativity or replacement of registers
4181 with equivalent constants or memory slots. */
4182
4183 for (i = 0; i < noperands; i++)
4184 {
4185 /* We only do this on the last pass through reload, because it is
4186 possible for some data (like reg_equiv_address) to be changed during
4187 later passes. Moreover, we lose the opportunity to get a useful
4188 reload_{in,out}_reg when we do these replacements. */
4189
4190 if (replace)
4191 {
4192 rtx substitution = substed_operand[i];
4193
4194 *recog_data.operand_loc[i] = substitution;
4195
4196 /* If we're replacing an operand with a LABEL_REF, we need to
4197 make sure that there's a REG_LABEL_OPERAND note attached to
4198 this instruction. */
4199 if (GET_CODE (substitution) == LABEL_REF
4200 && !find_reg_note (insn, REG_LABEL_OPERAND,
4201 XEXP (substitution, 0))
4202 /* For a JUMP_P, if it was a branch target it must have
4203 already been recorded as such. */
4204 && (!JUMP_P (insn)
4205 || !label_is_jump_target_p (XEXP (substitution, 0),
4206 insn)))
4207 add_reg_note (insn, REG_LABEL_OPERAND, XEXP (substitution, 0));
4208 }
4209 else
4210 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4211 }
4212
4213 /* If this insn pattern contains any MATCH_DUP's, make sure that
4214 they will be substituted if the operands they match are substituted.
4215 Also do now any substitutions we already did on the operands.
4216
4217 Don't do this if we aren't making replacements because we might be
4218 propagating things allocated by frame pointer elimination into places
4219 it doesn't expect. */
4220
4221 if (insn_code_number >= 0 && replace)
4222 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4223 {
4224 int opno = recog_data.dup_num[i];
4225 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4226 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4227 }
4228
4229 #if 0
4230 /* This loses because reloading of prior insns can invalidate the equivalence
4231 (or at least find_equiv_reg isn't smart enough to find it any more),
4232 causing this insn to need more reload regs than it needed before.
4233 It may be too late to make the reload regs available.
4234 Now this optimization is done safely in choose_reload_regs. */
4235
4236 /* For each reload of a reg into some other class of reg,
4237 search for an existing equivalent reg (same value now) in the right class.
4238 We can use it as long as we don't need to change its contents. */
4239 for (i = 0; i < n_reloads; i++)
4240 if (rld[i].reg_rtx == 0
4241 && rld[i].in != 0
4242 && REG_P (rld[i].in)
4243 && rld[i].out == 0)
4244 {
4245 rld[i].reg_rtx
4246 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4247 static_reload_reg_p, 0, rld[i].inmode);
4248 /* Prevent generation of insn to load the value
4249 because the one we found already has the value. */
4250 if (rld[i].reg_rtx)
4251 rld[i].in = rld[i].reg_rtx;
4252 }
4253 #endif
4254
4255 /* If we detected an error and replaced the asm instruction with a USE,
4256 forget about the reloads. */
4257 if (GET_CODE (PATTERN (insn)) == USE
4258 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4259 n_reloads = 0;
4260
4261 /* Perhaps an output reload can be combined with another
4262 to reduce needs by one. */
4263 if (!goal_earlyclobber)
4264 combine_reloads ();
4265
4266 /* If we have a pair of reloads for parts of an address, they are reloading
4267 the same object, the operands themselves were not reloaded, and they
4268 are for two operands that are supposed to match, merge the reloads and
4269 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4270
4271 for (i = 0; i < n_reloads; i++)
4272 {
4273 int k;
4274
4275 for (j = i + 1; j < n_reloads; j++)
4276 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4277 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4278 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4279 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4280 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4281 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4282 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4283 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4284 && rtx_equal_p (rld[i].in, rld[j].in)
4285 && (operand_reloadnum[rld[i].opnum] < 0
4286 || rld[operand_reloadnum[rld[i].opnum]].optional)
4287 && (operand_reloadnum[rld[j].opnum] < 0
4288 || rld[operand_reloadnum[rld[j].opnum]].optional)
4289 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4290 || (goal_alternative_matches[rld[j].opnum]
4291 == rld[i].opnum)))
4292 {
4293 for (k = 0; k < n_replacements; k++)
4294 if (replacements[k].what == j)
4295 replacements[k].what = i;
4296
4297 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4298 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4299 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4300 else
4301 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4302 rld[j].in = 0;
4303 }
4304 }
4305
4306 /* Scan all the reloads and update their type.
4307 If a reload is for the address of an operand and we didn't reload
4308 that operand, change the type. Similarly, change the operand number
4309 of a reload when two operands match. If a reload is optional, treat it
4310 as though the operand isn't reloaded.
4311
4312 ??? This latter case is somewhat odd because if we do the optional
4313 reload, it means the object is hanging around. Thus we need only
4314 do the address reload if the optional reload was NOT done.
4315
4316 Change secondary reloads to be the address type of their operand, not
4317 the normal type.
4318
4319 If an operand's reload is now RELOAD_OTHER, change any
4320 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4321 RELOAD_FOR_OTHER_ADDRESS. */
4322
4323 for (i = 0; i < n_reloads; i++)
4324 {
4325 if (rld[i].secondary_p
4326 && rld[i].when_needed == operand_type[rld[i].opnum])
4327 rld[i].when_needed = address_type[rld[i].opnum];
4328
4329 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4330 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4331 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4332 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4333 && (operand_reloadnum[rld[i].opnum] < 0
4334 || rld[operand_reloadnum[rld[i].opnum]].optional))
4335 {
4336 /* If we have a secondary reload to go along with this reload,
4337 change its type to RELOAD_FOR_OPADDR_ADDR. */
4338
4339 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4340 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4341 && rld[i].secondary_in_reload != -1)
4342 {
4343 int secondary_in_reload = rld[i].secondary_in_reload;
4344
4345 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4346
4347 /* If there's a tertiary reload we have to change it also. */
4348 if (secondary_in_reload > 0
4349 && rld[secondary_in_reload].secondary_in_reload != -1)
4350 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4351 = RELOAD_FOR_OPADDR_ADDR;
4352 }
4353
4354 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4355 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4356 && rld[i].secondary_out_reload != -1)
4357 {
4358 int secondary_out_reload = rld[i].secondary_out_reload;
4359
4360 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4361
4362 /* If there's a tertiary reload we have to change it also. */
4363 if (secondary_out_reload
4364 && rld[secondary_out_reload].secondary_out_reload != -1)
4365 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4366 = RELOAD_FOR_OPADDR_ADDR;
4367 }
4368
4369 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4370 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4371 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4372 else
4373 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4374 }
4375
4376 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4377 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4378 && operand_reloadnum[rld[i].opnum] >= 0
4379 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4380 == RELOAD_OTHER))
4381 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4382
4383 if (goal_alternative_matches[rld[i].opnum] >= 0)
4384 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4385 }
4386
4387 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4388 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4389 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4390
4391 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4392 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4393 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4394 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4395 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4396 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4397 This is complicated by the fact that a single operand can have more
4398 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4399 choose_reload_regs without affecting code quality, and cases that
4400 actually fail are extremely rare, so it turns out to be better to fix
4401 the problem here by not generating cases that choose_reload_regs will
4402 fail for. */
4403 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4404 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4405 a single operand.
4406 We can reduce the register pressure by exploiting that a
4407 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4408 does not conflict with any of them, if it is only used for the first of
4409 the RELOAD_FOR_X_ADDRESS reloads. */
4410 {
4411 int first_op_addr_num = -2;
4412 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4413 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4414 int need_change = 0;
4415 /* We use first_op_addr_num and the contents of the above arrays
4416 first as flags - -2 means no instance encountered, -1 means exactly
4417 one instance encountered.
4418 If more than one instance has been encountered, we store the reload
4419 number of the first reload of the kind in question; reload numbers
4420 are known to be non-negative. */
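/* Thus, starting from -2, the first instance bumps the flag to -1;
   on the second instance the "++" reaches 0, the >= 0 test fires,
   and the flag is overwritten with the current reload number.
   Since the loop runs backwards, it finally holds the number of
   the first reload of that kind.  */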
4421 for (i = 0; i < noperands; i++)
4422 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4423 for (i = n_reloads - 1; i >= 0; i--)
4424 {
4425 switch (rld[i].when_needed)
4426 {
4427 case RELOAD_FOR_OPERAND_ADDRESS:
4428 if (++first_op_addr_num >= 0)
4429 {
4430 first_op_addr_num = i;
4431 need_change = 1;
4432 }
4433 break;
4434 case RELOAD_FOR_INPUT_ADDRESS:
4435 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4436 {
4437 first_inpaddr_num[rld[i].opnum] = i;
4438 need_change = 1;
4439 }
4440 break;
4441 case RELOAD_FOR_OUTPUT_ADDRESS:
4442 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4443 {
4444 first_outpaddr_num[rld[i].opnum] = i;
4445 need_change = 1;
4446 }
4447 break;
4448 default:
4449 break;
4450 }
4451 }
4452
4453 if (need_change)
4454 {
4455 for (i = 0; i < n_reloads; i++)
4456 {
4457 int first_num;
4458 enum reload_type type;
4459
4460 switch (rld[i].when_needed)
4461 {
4462 case RELOAD_FOR_OPADDR_ADDR:
4463 first_num = first_op_addr_num;
4464 type = RELOAD_FOR_OPERAND_ADDRESS;
4465 break;
4466 case RELOAD_FOR_INPADDR_ADDRESS:
4467 first_num = first_inpaddr_num[rld[i].opnum];
4468 type = RELOAD_FOR_INPUT_ADDRESS;
4469 break;
4470 case RELOAD_FOR_OUTADDR_ADDRESS:
4471 first_num = first_outpaddr_num[rld[i].opnum];
4472 type = RELOAD_FOR_OUTPUT_ADDRESS;
4473 break;
4474 default:
4475 continue;
4476 }
4477 if (first_num < 0)
4478 continue;
4479 else if (i > first_num)
4480 rld[i].when_needed = type;
4481 else
4482 {
4483 /* Check if the only TYPE reload that uses reload I is
4484 reload FIRST_NUM. */
4485 for (j = n_reloads - 1; j > first_num; j--)
4486 {
4487 if (rld[j].when_needed == type
4488 && (rld[i].secondary_p
4489 ? rld[j].secondary_in_reload == i
4490 : reg_mentioned_p (rld[i].in, rld[j].in)))
4491 {
4492 rld[i].when_needed = type;
4493 break;
4494 }
4495 }
4496 }
4497 }
4498 }
4499 }
4500
4501 /* See if we have any reloads that are now allowed to be merged
4502 because we've changed when the reload is needed to
4503 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4504 check for the most common cases. */
4505
4506 for (i = 0; i < n_reloads; i++)
4507 if (rld[i].in != 0 && rld[i].out == 0
4508 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4509 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4510 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4511 for (j = 0; j < n_reloads; j++)
4512 if (i != j && rld[j].in != 0 && rld[j].out == 0
4513 && rld[j].when_needed == rld[i].when_needed
4514 && MATCHES (rld[i].in, rld[j].in)
4515 && rld[i].rclass == rld[j].rclass
4516 && !rld[i].nocombine && !rld[j].nocombine
4517 && rld[i].reg_rtx == rld[j].reg_rtx)
4518 {
4519 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4520 transfer_replacements (i, j);
4521 rld[j].in = 0;
4522 }
4523
4524 #ifdef HAVE_cc0
4525 /* If we made any reloads for addresses, see if they violate a
4526 "no input reloads" requirement for this insn. But loads that we
4527 do after the insn (such as for output addresses) are fine. */
4528 if (no_input_reloads)
4529 for (i = 0; i < n_reloads; i++)
4530 gcc_assert (rld[i].in == 0
4531 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4532 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4533 #endif
4534
4535 /* Compute reload_mode and reload_nregs. */
4536 for (i = 0; i < n_reloads; i++)
4537 {
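/* The reload register must be wide enough for both the value read
   in and the value written out, so use the wider of inmode and
   outmode, treating VOIDmode as narrowest.  */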
4538 rld[i].mode
4539 = (rld[i].inmode == VOIDmode
4540 || (GET_MODE_SIZE (rld[i].outmode)
4541 > GET_MODE_SIZE (rld[i].inmode)))
4542 ? rld[i].outmode : rld[i].inmode;
4543
4544 rld[i].nregs = CLASS_MAX_NREGS (rld[i].rclass, rld[i].mode);
4545 }
4546
4547 /* Special case a simple move with an input reload and a
4548 destination of a hard reg: if the hard reg is OK, use it. */
4549 for (i = 0; i < n_reloads; i++)
4550 if (rld[i].when_needed == RELOAD_FOR_INPUT
4551 && GET_CODE (PATTERN (insn)) == SET
4552 && REG_P (SET_DEST (PATTERN (insn)))
4553 && (SET_SRC (PATTERN (insn)) == rld[i].in
4554 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4555 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4556 {
4557 rtx dest = SET_DEST (PATTERN (insn));
4558 unsigned int regno = REGNO (dest);
4559
4560 if (regno < FIRST_PSEUDO_REGISTER
4561 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4562 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4563 {
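/* The value may span hard regs REGNO .. REGNO+NR-1 in rld[i].mode;
   every one of them, not just REGNO itself, must belong to the
   reload's class before DEST can be used directly.  */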
4564 int nr = hard_regno_nregs[regno][rld[i].mode];
4565 int ok = 1, nri;
4566
4567 for (nri = 1; nri < nr; nri ++)
4568 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4569 ok = 0;
4570
4571 if (ok)
4572 rld[i].reg_rtx = dest;
4573 }
4574 }
4575
4576 return retval;
4577 }
4578
4579 /* Return true if alternative number ALTNUM in constraint-string
4580 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4581 MEM gives the reference if it didn't need any reloads, otherwise it
4582 is null. */
4583
4584 static bool
4585 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4586 const char *constraint, int altnum)
4587 {
4588 int c;
4589
4590 /* Skip alternatives before the one requested. */
4591 while (altnum > 0)
4592 {
4593 while (*constraint++ != ',');
4594 altnum--;
4595 }
4596 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4597 If one of them is present, this alternative accepts the result of
4598 passing a constant-pool reference through find_reloads_toplev.
4599
4600 The same is true of extra memory constraints if the address
4601 was reloaded into a register. However, the target may elect
4602 to disallow the original constant address, forcing it to be
4603 reloaded into a register instead. */
4604 for (; (c = *constraint) && c != ',' && c != '#';
4605 constraint += CONSTRAINT_LEN (c, constraint))
4606 {
4607 if (c == TARGET_MEM_CONSTRAINT || c == 'o')
4608 return true;
4609 #ifdef EXTRA_CONSTRAINT_STR
4610 if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
4611 && (mem == NULL || EXTRA_CONSTRAINT_STR (mem, c, constraint)))
4612 return true;
4613 #endif
4614 }
4615 return false;
4616 }
4617 \f
4618 /* Scan X for memory references and scan the addresses for reloading.
4619 Also checks for references to "constant" regs that we want to eliminate
4620 and replaces them with the values they stand for.
4621 We may alter X destructively if it contains a reference to such.
4622 If X is just a constant reg, we return the equivalent value
4623 instead of X.
4624
4625 IND_LEVELS says how many levels of indirect addressing this machine
4626 supports.
4627
4628 OPNUM and TYPE identify the purpose of the reload.
4629
4630 IS_SET_DEST is true if X is the destination of a SET, which is not
4631 appropriate to be replaced by a constant.
4632
4633 INSN, if nonzero, is the insn in which we do the reload. It is used
4634 to determine if we may generate output reloads, and where to put USEs
4635 for pseudos that we have to replace with stack slots.
4636
4637 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4638 result of find_reloads_address. */
4639
4640 static rtx
4641 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4642 int ind_levels, int is_set_dest, rtx insn,
4643 int *address_reloaded)
4644 {
4645 RTX_CODE code = GET_CODE (x);
4646
4647 const char *fmt = GET_RTX_FORMAT (code);
4648 int i;
4649 int copied;
4650
4651 if (code == REG)
4652 {
4653 /* This code is duplicated for speed in find_reloads. */
4654 int regno = REGNO (x);
4655 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4656 x = reg_equiv_constant (regno);
4657 #if 0
4658 /* This creates (subreg (mem...)) which would cause an unnecessary
4659 reload of the mem. */
4660 else if (reg_equiv_mem (regno) != 0)
4661 x = reg_equiv_mem (regno);
4662 #endif
4663 else if (reg_equiv_memory_loc (regno)
4664 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4665 {
4666 rtx mem = make_memloc (x, regno);
4667 if (reg_equiv_address (regno)
4668 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4669 {
4670 /* If this is not a toplevel operand, find_reloads doesn't see
4671 this substitution. We have to emit a USE of the pseudo so
4672 that delete_output_reload can see it. */
4673 if (replace_reloads && recog_data.operand[opnum] != x)
4674 /* We mark the USE with QImode so that we recognize it
4675 as one that can be safely deleted at the end of
4676 reload. */
4677 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4678 QImode);
4679 x = mem;
4680 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4681 opnum, type, ind_levels, insn);
4682 if (!rtx_equal_p (x, mem))
4683 push_reg_equiv_alt_mem (regno, x);
4684 if (address_reloaded)
4685 *address_reloaded = i;
4686 }
4687 }
4688 return x;
4689 }
4690 if (code == MEM)
4691 {
4692 rtx tem = x;
4693
4694 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4695 opnum, type, ind_levels, insn);
4696 if (address_reloaded)
4697 *address_reloaded = i;
4698
4699 return tem;
4700 }
4701
4702 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4703 {
4704 /* Check for SUBREG containing a REG that's equivalent to a
4705 constant. If the constant has a known value, truncate it
4706 right now. Similarly if we are extracting a single-word of a
4707 multi-word constant. If the constant is symbolic, allow it
4708 to be substituted normally. push_reload will strip the
4709 subreg later. The constant must not be VOIDmode, because we
4710 will lose the mode of the register (this should never happen
4711 because one of the cases above should handle it). */
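/* For instance, (subreg:SI (reg:DI N) 0) where N is equivalent to
   a known integer constant should simplify here to the
   corresponding SImode word of that constant.  */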
4712
4713 int regno = REGNO (SUBREG_REG (x));
4714 rtx tem;
4715
4716 if (regno >= FIRST_PSEUDO_REGISTER
4717 && reg_renumber[regno] < 0
4718 && reg_equiv_constant (regno) != 0)
4719 {
4720 tem =
4721 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4722 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4723 gcc_assert (tem);
4724 if (CONSTANT_P (tem) && !LEGITIMATE_CONSTANT_P (tem))
4725 {
4726 tem = force_const_mem (GET_MODE (x), tem);
4727 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4728 &XEXP (tem, 0), opnum, type,
4729 ind_levels, insn);
4730 if (address_reloaded)
4731 *address_reloaded = i;
4732 }
4733 return tem;
4734 }
4735
4736 /* If the subreg contains a reg that will be converted to a mem,
4737 convert the subreg to a narrower memref now.
4738 Otherwise, we would get (subreg (mem ...) ...),
4739 which would force reload of the mem.
4740
4741 We also need to do this if there is an equivalent MEM that is
4742 not offsettable. In that case, alter_subreg would produce an
4743 invalid address on big-endian machines.
4744
4745 For machines that extend byte loads, we must not reload using
4746 a wider mode if we have a paradoxical SUBREG. find_reloads will
4747 force a reload in that case. So we should not do anything here. */
4748
4749 if (regno >= FIRST_PSEUDO_REGISTER
4750 #ifdef LOAD_EXTEND_OP
4751 && (GET_MODE_SIZE (GET_MODE (x))
4752 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4753 #endif
4754 && (reg_equiv_address (regno) != 0
4755 || (reg_equiv_mem (regno) != 0
4756 && (! strict_memory_address_addr_space_p
4757 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
4758 MEM_ADDR_SPACE (reg_equiv_mem (regno)))
4759 || ! offsettable_memref_p (reg_equiv_mem (regno))
4760 || num_not_at_initial_offset))))
4761 x = find_reloads_subreg_address (x, 1, opnum, type, ind_levels,
4762 insn, address_reloaded);
4763 }
4764
4765 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4766 {
4767 if (fmt[i] == 'e')
4768 {
4769 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4770 ind_levels, is_set_dest, insn,
4771 address_reloaded);
4772 /* If we have replaced a reg with its equivalent memory loc -
4773 that can still be handled here e.g. if it's in a paradoxical
4774 subreg - we must make the change in a copy, rather than using
4775 a destructive change. This way, find_reloads can still elect
4776 not to do the change. */
4777 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4778 {
4779 x = shallow_copy_rtx (x);
4780 copied = 1;
4781 }
4782 XEXP (x, i) = new_part;
4783 }
4784 }
4785 return x;
4786 }
4787
4788 /* Return a mem ref for the memory equivalent of reg REGNO.
4789 This mem ref is not shared with anything. */
4790
4791 static rtx
4792 make_memloc (rtx ad, int regno)
4793 {
4794 /* We must rerun eliminate_regs, in case the elimination
4795 offsets have changed. */
4796 rtx tem
4797 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4798 0);
4799
4800 /* If TEM might contain a pseudo, we must copy it to avoid
4801 modifying it when we do the substitution for the reload. */
4802 if (rtx_varies_p (tem, 0))
4803 tem = copy_rtx (tem);
4804
4805 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4806 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4807
4808 /* Copy the result if it's still the same as the equivalence, to avoid
4809 modifying it when we do the substitution for the reload. */
4810 if (tem == reg_equiv_memory_loc (regno))
4811 tem = copy_rtx (tem);
4812 return tem;
4813 }
4814
4815 /* Returns true if AD could be turned into a valid memory reference
4816 to mode MODE in address space AS by reloading the part pointed to
4817 by PART into a register. */
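/* For instance, with AD = (plus (reg 65) (const_int 4)) and
   PART = &XEXP (ad, 0), this asks whether (plus (reg NEW) (const_int 4))
   would be a valid address, where NEW stands for a fresh, not yet
   allocated register number; the original contents of *PART are restored
   before returning.  The register numbers here are purely illustrative.  */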
4818
4819 static int
4820 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4821 addr_space_t as, rtx *part)
4822 {
4823 int retv;
4824 rtx tem = *part;
4825 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4826
4827 *part = reg;
4828 retv = memory_address_addr_space_p (mode, ad, as);
4829 *part = tem;
4830
4831 return retv;
4832 }
4833
4834 /* Record all reloads needed for handling memory address AD
4835 which appears in *LOC in a memory reference to mode MODE
4836 which itself is found in location *MEMREFLOC.
4837 Note that we take shortcuts assuming that no multi-reg machine mode
4838 occurs as part of an address.
4839
4840 OPNUM and TYPE specify the purpose of this reload.
4841
4842 IND_LEVELS says how many levels of indirect addressing this machine
4843 supports.
4844
4845 INSN, if nonzero, is the insn in which we do the reload. It is used
4846 to determine if we may generate output reloads, and where to put USEs
4847 for pseudos that we have to replace with stack slots.
4848
4849 Value is one if this address is reloaded or replaced as a whole; it is
4850 zero if the top level of this address was not reloaded or replaced, and
4851 it is -1 if it may or may not have been reloaded or replaced.
4852
4853 Note that there is no verification that the address will be valid after
4854 this routine does its work. Instead, we rely on the fact that the address
4855 was valid when reload started. So we need only undo things that reload
4856 could have broken. These are wrong register types, pseudos not allocated
4857 to a hard register, and frame pointer elimination. */
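/* A typical case (illustrative numbers): after frame pointer elimination
   AD might be (plus (reg sp) (const_int 100000)), a valid sum of a base
   register and a constant that is nevertheless not a valid address on a
   machine with a limited displacement range.  The code below then reloads
   either the displacement into an index register (when
   double_reg_address_ok allows it) or the whole sum into a base register,
   returning 0 in the first case and normally 1 in the second.  */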
4858
4859 static int
4860 find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
4861 rtx *loc, int opnum, enum reload_type type,
4862 int ind_levels, rtx insn)
4863 {
4864 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4865 : ADDR_SPACE_GENERIC;
4866 int regno;
4867 int removed_and = 0;
4868 int op_index;
4869 rtx tem;
4870
4871 /* If the address is a register, see if it is a legitimate address and
4872 reload if not. We first handle the cases where we need not reload
4873 or where we must reload in a non-standard way. */
4874
4875 if (REG_P (ad))
4876 {
4877 regno = REGNO (ad);
4878
4879 if (reg_equiv_constant (regno) != 0)
4880 {
4881 find_reloads_address_part (reg_equiv_constant (regno), loc,
4882 base_reg_class (mode, MEM, SCRATCH),
4883 GET_MODE (ad), opnum, type, ind_levels);
4884 return 1;
4885 }
4886
4887 tem = reg_equiv_memory_loc (regno);
4888 if (tem != 0)
4889 {
4890 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4891 {
4892 tem = make_memloc (ad, regno);
4893 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4894 XEXP (tem, 0),
4895 MEM_ADDR_SPACE (tem)))
4896 {
4897 rtx orig = tem;
4898
4899 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4900 &XEXP (tem, 0), opnum,
4901 ADDR_TYPE (type), ind_levels, insn);
4902 if (!rtx_equal_p (tem, orig))
4903 push_reg_equiv_alt_mem (regno, tem);
4904 }
4905 /* We can avoid a reload if the register's equivalent memory
4906 expression is valid as an indirect memory address.
4907 But not all addresses are valid in a mem used as an indirect
4908 address: only reg or reg+constant. */
4909
4910 if (ind_levels > 0
4911 && strict_memory_address_addr_space_p (mode, tem, as)
4912 && (REG_P (XEXP (tem, 0))
4913 || (GET_CODE (XEXP (tem, 0)) == PLUS
4914 && REG_P (XEXP (XEXP (tem, 0), 0))
4915 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4916 {
4917 /* If TEM is not the same as what we'll be replacing the
4918 pseudo with after reload, put a USE in front of INSN
4919 in the final reload pass. */
4920 if (replace_reloads
4921 && num_not_at_initial_offset
4922 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4923 {
4924 *loc = tem;
4925 /* We mark the USE with QImode so that we
4926 recognize it as one that can be safely
4927 deleted at the end of reload. */
4928 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4929 insn), QImode);
4930
4931 /* This doesn't really count as replacing the address
4932 as a whole, since it is still a memory access. */
4933 }
4934 return 0;
4935 }
4936 ad = tem;
4937 }
4938 }
4939
4940 /* The only remaining case where we can avoid a reload is if this is a
4941 hard register that is valid as a base register and which is not the
4942 subject of a CLOBBER in this insn. */
4943
4944 else if (regno < FIRST_PSEUDO_REGISTER
4945 && regno_ok_for_base_p (regno, mode, MEM, SCRATCH)
4946 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4947 return 0;
4948
4949 /* If we do not have one of the cases above, we must do the reload. */
4950 push_reload (ad, NULL_RTX, loc, (rtx*) 0, base_reg_class (mode, MEM, SCRATCH),
4951 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4952 return 1;
4953 }
4954
4955 if (strict_memory_address_addr_space_p (mode, ad, as))
4956 {
4957 /* The address appears valid, so reloads are not needed.
4958 But the address may contain an eliminable register.
4959 This can happen because a machine with indirect addressing
4960 may consider a pseudo register by itself a valid address even when
4961 it has failed to get a hard reg.
4962 So do a tree-walk to find and eliminate all such regs. */
4963
4964 /* But first quickly dispose of a common case. */
4965 if (GET_CODE (ad) == PLUS
4966 && CONST_INT_P (XEXP (ad, 1))
4967 && REG_P (XEXP (ad, 0))
4968 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4969 return 0;
4970
4971 subst_reg_equivs_changed = 0;
4972 *loc = subst_reg_equivs (ad, insn);
4973
4974 if (! subst_reg_equivs_changed)
4975 return 0;
4976
4977 /* Check result for validity after substitution. */
4978 if (strict_memory_address_addr_space_p (mode, ad, as))
4979 return 0;
4980 }
4981
4982 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4983 do
4984 {
4985 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4986 {
4987 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4988 ind_levels, win);
4989 }
4990 break;
4991 win:
4992 *memrefloc = copy_rtx (*memrefloc);
4993 XEXP (*memrefloc, 0) = ad;
4994 move_replacements (&ad, &XEXP (*memrefloc, 0));
4995 return -1;
4996 }
4997 while (0);
4998 #endif
4999
5000 /* The address is not valid. We have to figure out why. First see if
5001 we have an outer AND and remove it if so. Then analyze what's inside. */
5002
5003 if (GET_CODE (ad) == AND)
5004 {
5005 removed_and = 1;
5006 loc = &XEXP (ad, 0);
5007 ad = *loc;
5008 }
5009
5010 /* One possibility for why the address is invalid is that it is itself
5011 a MEM. This can happen when the frame pointer is being eliminated, a
5012 pseudo is not allocated to a hard register, and the offset between the
5013 frame and stack pointers is not its initial value. In that case the
5014 pseudo will have been replaced by a MEM referring to the
5015 stack pointer. */
5016 if (MEM_P (ad))
5017 {
5018 /* First ensure that the address in this MEM is valid. Then, unless
5019 indirect addresses are valid, reload the MEM into a register. */
5020 tem = ad;
5021 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5022 opnum, ADDR_TYPE (type),
5023 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5024
5025 /* If tem was changed, then we must create a new memory reference to
5026 hold it and store it back into memrefloc. */
5027 if (tem != ad && memrefloc)
5028 {
5029 *memrefloc = copy_rtx (*memrefloc);
5030 copy_replacements (tem, XEXP (*memrefloc, 0));
5031 loc = &XEXP (*memrefloc, 0);
5032 if (removed_and)
5033 loc = &XEXP (*loc, 0);
5034 }
5035
5036 /* Check similar cases as for indirect addresses as above except
5037 that we can allow pseudos and a MEM since they should have been
5038 taken care of above. */
5039
5040 if (ind_levels == 0
5041 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5042 || MEM_P (XEXP (tem, 0))
5043 || ! (REG_P (XEXP (tem, 0))
5044 || (GET_CODE (XEXP (tem, 0)) == PLUS
5045 && REG_P (XEXP (XEXP (tem, 0), 0))
5046 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5047 {
5048 /* Must use TEM here, not AD, since it is the one that will
5049 have any subexpressions reloaded, if needed. */
5050 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5051 base_reg_class (mode, MEM, SCRATCH), GET_MODE (tem),
5052 VOIDmode, 0,
5053 0, opnum, type);
5054 return ! removed_and;
5055 }
5056 else
5057 return 0;
5058 }
5059
5060 /* If we have address of a stack slot but it's not valid because the
5061 displacement is too large, compute the sum in a register.
5062 Handle all base registers here, not just fp/ap/sp, because on some
5063 targets (namely SH) we can also get too large displacements from
5064 big-endian corrections. */
5065 else if (GET_CODE (ad) == PLUS
5066 && REG_P (XEXP (ad, 0))
5067 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5068 && CONST_INT_P (XEXP (ad, 1))
5069 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, PLUS,
5070 CONST_INT)
5071 /* Similarly, if we were to reload the base register and the
5072 mem+offset address is still invalid, then we want to reload
5073 the whole address, not just the base register. */
5074 || ! maybe_memory_address_addr_space_p
5075 (mode, ad, as, &(XEXP (ad, 0)))))
5076
5077 {
5078 /* Unshare the MEM rtx so we can safely alter it. */
5079 if (memrefloc)
5080 {
5081 *memrefloc = copy_rtx (*memrefloc);
5082 loc = &XEXP (*memrefloc, 0);
5083 if (removed_and)
5084 loc = &XEXP (*loc, 0);
5085 }
5086
5087 if (double_reg_address_ok
5088 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode,
5089 PLUS, CONST_INT))
5090 {
5091 /* Unshare the sum as well. */
5092 *loc = ad = copy_rtx (ad);
5093
5094 /* Reload the displacement into an index reg.
5095 We assume the frame pointer or arg pointer is a base reg. */
5096 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5097 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5098 type, ind_levels);
5099 return 0;
5100 }
5101 else
5102 {
5103 /* If the sum of two regs is not necessarily valid,
5104 reload the sum into a base reg.
5105 That will at least work. */
5106 find_reloads_address_part (ad, loc,
5107 base_reg_class (mode, MEM, SCRATCH),
5108 GET_MODE (ad), opnum, type, ind_levels);
5109 }
5110 return ! removed_and;
5111 }
5112
5113 /* If we have an indexed stack slot, there are three possible reasons why
5114 it might be invalid: The index might need to be reloaded, the address
5115 might have been made by frame pointer elimination and hence have a
5116 constant out of range, or both reasons might apply.
5117
5118 We can easily check for an index needing reload, but even if that is the
5119 case, we might also have an invalid constant. To avoid making the
5120 conservative assumption and requiring two reloads, we see if this address
5121 is valid when not interpreted strictly. If it is, the only problem is
5122 that the index needs a reload and find_reloads_address_1 will take care
5123 of it.
5124
5125 Handle all base registers here, not just fp/ap/sp, because on some
5126 targets (namely SPARC) we can also get invalid addresses from preventive
5127 subreg big-endian corrections made by find_reloads_toplev. We
5128 can also get expressions involving LO_SUM (rather than PLUS) from
5129 find_reloads_subreg_address.
5130
5131 If we decide to do something, it must be that `double_reg_address_ok'
5132 is true. We generate a reload of the base register + constant and
5133 rework the sum so that the reload register will be added to the index.
5134 This is safe because we know the address isn't shared.
5135
5136 We check for the base register as both the first and second operand of
5137 the innermost PLUS and/or LO_SUM. */
5138
5139 for (op_index = 0; op_index < 2; ++op_index)
5140 {
5141 rtx operand, addend;
5142 enum rtx_code inner_code;
5143
5144 if (GET_CODE (ad) != PLUS)
5145 continue;
5146
5147 inner_code = GET_CODE (XEXP (ad, 0));
5148 if (!(GET_CODE (ad) == PLUS
5149 && CONST_INT_P (XEXP (ad, 1))
5150 && (inner_code == PLUS || inner_code == LO_SUM)))
5151 continue;
5152
5153 operand = XEXP (XEXP (ad, 0), op_index);
5154 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5155 continue;
5156
5157 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5158
5159 if ((regno_ok_for_base_p (REGNO (operand), mode, inner_code,
5160 GET_CODE (addend))
5161 || operand == frame_pointer_rtx
5162 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
5163 || operand == hard_frame_pointer_rtx
5164 #endif
5165 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5166 || operand == arg_pointer_rtx
5167 #endif
5168 || operand == stack_pointer_rtx)
5169 && ! maybe_memory_address_addr_space_p
5170 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5171 {
5172 rtx offset_reg;
5173 enum reg_class cls;
5174
5175 offset_reg = plus_constant (operand, INTVAL (XEXP (ad, 1)));
5176
5177 /* Form the adjusted address. */
5178 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5179 ad = gen_rtx_PLUS (GET_MODE (ad),
5180 op_index == 0 ? offset_reg : addend,
5181 op_index == 0 ? addend : offset_reg);
5182 else
5183 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5184 op_index == 0 ? offset_reg : addend,
5185 op_index == 0 ? addend : offset_reg);
5186 *loc = ad;
5187
5188 cls = base_reg_class (mode, MEM, GET_CODE (addend));
5189 find_reloads_address_part (XEXP (ad, op_index),
5190 &XEXP (ad, op_index), cls,
5191 GET_MODE (ad), opnum, type, ind_levels);
5192 find_reloads_address_1 (mode,
5193 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5194 GET_CODE (XEXP (ad, op_index)),
5195 &XEXP (ad, 1 - op_index), opnum,
5196 type, 0, insn);
5197
5198 return 0;
5199 }
5200 }
5201
5202 /* See if address becomes valid when an eliminable register
5203 in a sum is replaced. */
5204
5205 tem = ad;
5206 if (GET_CODE (ad) == PLUS)
5207 tem = subst_indexed_address (ad);
5208 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5209 {
5210 /* Ok, we win that way. Replace any additional eliminable
5211 registers. */
5212
5213 subst_reg_equivs_changed = 0;
5214 tem = subst_reg_equivs (tem, insn);
5215
5216 /* Make sure that didn't make the address invalid again. */
5217
5218 if (! subst_reg_equivs_changed
5219 || strict_memory_address_addr_space_p (mode, tem, as))
5220 {
5221 *loc = tem;
5222 return 0;
5223 }
5224 }
5225
5226 /* If constants aren't valid addresses, reload the constant address
5227 into a register. */
5228 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5229 {
5230 enum machine_mode address_mode = GET_MODE (ad);
5231 if (address_mode == VOIDmode)
5232 address_mode = targetm.addr_space.address_mode (as);
5233
5234 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5235 Unshare it so we can safely alter it. */
5236 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5237 && CONSTANT_POOL_ADDRESS_P (ad))
5238 {
5239 *memrefloc = copy_rtx (*memrefloc);
5240 loc = &XEXP (*memrefloc, 0);
5241 if (removed_and)
5242 loc = &XEXP (*loc, 0);
5243 }
5244
5245 find_reloads_address_part (ad, loc, base_reg_class (mode, MEM, SCRATCH),
5246 address_mode, opnum, type, ind_levels);
5247 return ! removed_and;
5248 }
5249
5250 return find_reloads_address_1 (mode, ad, 0, MEM, SCRATCH, loc, opnum, type,
5251 ind_levels, insn);
5252 }
5253 \f
5254 /* Find all pseudo regs appearing in AD
5255 that are eliminable in favor of equivalent values
5256 and do not have hard regs; replace them by their equivalents.
5257 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5258 front of it for pseudos that we have to replace with stack slots. */
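/* For example (hypothetical register numbers): if (reg 65) did not get a
   hard register and reg_equiv_constant (65) is (const_int 64), while
   (reg 66) has no equivalence of its own, then
   AD = (plus (reg 66) (reg 65)) is rewritten in place as
   (plus (reg 66) (const_int 64)) and subst_reg_equivs_changed is set.  */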
5259
5260 static rtx
5261 subst_reg_equivs (rtx ad, rtx insn)
5262 {
5263 RTX_CODE code = GET_CODE (ad);
5264 int i;
5265 const char *fmt;
5266
5267 switch (code)
5268 {
5269 case HIGH:
5270 case CONST_INT:
5271 case CONST:
5272 case CONST_DOUBLE:
5273 case CONST_FIXED:
5274 case CONST_VECTOR:
5275 case SYMBOL_REF:
5276 case LABEL_REF:
5277 case PC:
5278 case CC0:
5279 return ad;
5280
5281 case REG:
5282 {
5283 int regno = REGNO (ad);
5284
5285 if (reg_equiv_constant (regno) != 0)
5286 {
5287 subst_reg_equivs_changed = 1;
5288 return reg_equiv_constant (regno);
5289 }
5290 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5291 {
5292 rtx mem = make_memloc (ad, regno);
5293 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5294 {
5295 subst_reg_equivs_changed = 1;
5296 /* We mark the USE with QImode so that we recognize it
5297 as one that can be safely deleted at the end of
5298 reload. */
5299 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5300 QImode);
5301 return mem;
5302 }
5303 }
5304 }
5305 return ad;
5306
5307 case PLUS:
5308 /* Quickly dispose of a common case. */
5309 if (XEXP (ad, 0) == frame_pointer_rtx
5310 && CONST_INT_P (XEXP (ad, 1)))
5311 return ad;
5312 break;
5313
5314 default:
5315 break;
5316 }
5317
5318 fmt = GET_RTX_FORMAT (code);
5319 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5320 if (fmt[i] == 'e')
5321 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5322 return ad;
5323 }
5324 \f
5325 /* Compute the sum of X and Y, making canonicalizations assumed in an
5326 address, namely: sum constant integers, surround the sum of two
5327 constants with a CONST, put the constant as the second operand, and
5328 group the constant on the outermost sum.
5329
5330 This routine assumes both inputs are already in canonical form. */
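/* Examples (illustrative register numbers only):
   form_sum (Pmode, (plus (reg 65) (const_int 4)), (const_int 8))
   yields (plus (reg 65) (const_int 12)), and
   form_sum (Pmode, (symbol_ref "x"), (const_int 4)) yields
   (const (plus (symbol_ref "x") (const_int 4))).  */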
5331
5332 rtx
5333 form_sum (enum machine_mode mode, rtx x, rtx y)
5334 {
5335 rtx tem;
5336
5337 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5338 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5339
5340 if (CONST_INT_P (x))
5341 return plus_constant (y, INTVAL (x));
5342 else if (CONST_INT_P (y))
5343 return plus_constant (x, INTVAL (y));
5344 else if (CONSTANT_P (x))
5345 tem = x, x = y, y = tem;
5346
5347 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5348 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5349
5350 /* Note that if the operands of Y are specified in the opposite
5351 order in the recursive calls below, infinite recursion will occur. */
5352 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5353 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5354
5355 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5356 constant will have been placed second. */
5357 if (CONSTANT_P (x) && CONSTANT_P (y))
5358 {
5359 if (GET_CODE (x) == CONST)
5360 x = XEXP (x, 0);
5361 if (GET_CODE (y) == CONST)
5362 y = XEXP (y, 0);
5363
5364 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5365 }
5366
5367 return gen_rtx_PLUS (mode, x, y);
5368 }
5369 \f
5370 /* If ADDR is a sum containing a pseudo register that should be
5371 replaced with a constant (from reg_equiv_constant),
5372 return the result of doing so, and also apply the associative
5373 law so that the result is more likely to be a valid address.
5374 (But it is not guaranteed to be one.)
5375
5376 Note that at most one register is replaced, even if more are
5377 replaceable. Also, we try to put the result into a canonical form
5378 so it is more likely to be a valid address.
5379
5380 In all other cases, return ADDR. */
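/* Worked example (hypothetical register numbers): if (reg 65) is an
   unallocated pseudo with reg_equiv_constant (65) = (const_int 16), then
   ADDR = (plus (plus (reg 66) (reg 65)) (const_int 4)) becomes
   (plus (reg 66) (const_int 20)): the pseudo is replaced by its constant
   and the two constants are folded together by form_sum.  */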
5381
5382 static rtx
5383 subst_indexed_address (rtx addr)
5384 {
5385 rtx op0 = 0, op1 = 0, op2 = 0;
5386 rtx tem;
5387 int regno;
5388
5389 if (GET_CODE (addr) == PLUS)
5390 {
5391 /* Try to find a register to replace. */
5392 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5393 if (REG_P (op0)
5394 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5395 && reg_renumber[regno] < 0
5396 && reg_equiv_constant (regno) != 0)
5397 op0 = reg_equiv_constant (regno);
5398 else if (REG_P (op1)
5399 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5400 && reg_renumber[regno] < 0
5401 && reg_equiv_constant (regno) != 0)
5402 op1 = reg_equiv_constant (regno);
5403 else if (GET_CODE (op0) == PLUS
5404 && (tem = subst_indexed_address (op0)) != op0)
5405 op0 = tem;
5406 else if (GET_CODE (op1) == PLUS
5407 && (tem = subst_indexed_address (op1)) != op1)
5408 op1 = tem;
5409 else
5410 return addr;
5411
5412 /* Pick out up to three things to add. */
5413 if (GET_CODE (op1) == PLUS)
5414 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5415 else if (GET_CODE (op0) == PLUS)
5416 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5417
5418 /* Compute the sum. */
5419 if (op2 != 0)
5420 op1 = form_sum (GET_MODE (addr), op1, op2);
5421 if (op1 != 0)
5422 op0 = form_sum (GET_MODE (addr), op0, op1);
5423
5424 return op0;
5425 }
5426 return addr;
5427 }
5428 \f
5429 /* Update the REG_INC notes for an insn. It updates all REG_INC
5430 notes for the instruction which refer to REGNO so that they refer
5431 to the reload number.
5432
5433 INSN is the insn for which any REG_INC notes need updating.
5434
5435 REGNO is the register number which has been reloaded.
5436
5437 RELOADNUM is the reload number. */
5438
5439 static void
5440 update_auto_inc_notes (rtx insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5441 int reloadnum ATTRIBUTE_UNUSED)
5442 {
5443 #ifdef AUTO_INC_DEC
5444 rtx link;
5445
5446 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5447 if (REG_NOTE_KIND (link) == REG_INC
5448 && (int) REGNO (XEXP (link, 0)) == regno)
5449 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5450 #endif
5451 }
5452 \f
5453 /* Record the pseudo registers we must reload into hard registers in a
5454 subexpression of a would-be memory address X, which refers to a value
5455 in mode MODE. (This function is not called if the address we find
5456 is strictly valid.)
5457
5458 CONTEXT = 1 means we are considering regs as index regs,
5459 = 0 means we are considering them as base regs.
5460 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5461 or an autoinc code.
5462 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5463 is the code of the index part of the address. Otherwise, pass SCRATCH
5464 for this argument.
5465 OPNUM and TYPE specify the purpose of any reloads made.
5466
5467 IND_LEVELS says how many levels of indirect addressing are
5468 supported at this point in the address.
5469
5470 INSN, if nonzero, is the insn in which we do the reload. It is used
5471 to determine if we may generate output reloads.
5472
5473 We return nonzero if X, as a whole, is reloaded or replaced. */
5474
5475 /* Note that we take shortcuts assuming that no multi-reg machine mode
5476 occurs as part of an address.
5477 Also, this is not fully machine-customizable; it works for machines
5478 such as VAXen and 68000's and 32000's, but other possible machines
5479 could have addressing modes that this does not handle right.
5480 If you add push_reload calls here, you need to make sure gen_reload
5481 handles those cases gracefully. */
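/* For instance, in X = (plus (reg 65) (mult (reg 66) (const_int 4)))
   the MULT operand marks that side as the index, so (reg 65) is checked
   as a base register (with MULT as the index code) and (reg 66) as an
   index register; either one is reloaded if it is a pseudo without a
   hard register or a hard register of the wrong class.  Register numbers
   are illustrative only.  */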
5482
5483 static int
5484 find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
5485 enum rtx_code outer_code, enum rtx_code index_code,
5486 rtx *loc, int opnum, enum reload_type type,
5487 int ind_levels, rtx insn)
5488 {
5489 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, OUTER, INDEX) \
5490 ((CONTEXT) == 0 \
5491 ? regno_ok_for_base_p (REGNO, MODE, OUTER, INDEX) \
5492 : REGNO_OK_FOR_INDEX_P (REGNO))
5493
5494 enum reg_class context_reg_class;
5495 RTX_CODE code = GET_CODE (x);
5496
5497 if (context == 1)
5498 context_reg_class = INDEX_REG_CLASS;
5499 else
5500 context_reg_class = base_reg_class (mode, outer_code, index_code);
5501
5502 switch (code)
5503 {
5504 case PLUS:
5505 {
5506 rtx orig_op0 = XEXP (x, 0);
5507 rtx orig_op1 = XEXP (x, 1);
5508 RTX_CODE code0 = GET_CODE (orig_op0);
5509 RTX_CODE code1 = GET_CODE (orig_op1);
5510 rtx op0 = orig_op0;
5511 rtx op1 = orig_op1;
5512
5513 if (GET_CODE (op0) == SUBREG)
5514 {
5515 op0 = SUBREG_REG (op0);
5516 code0 = GET_CODE (op0);
5517 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5518 op0 = gen_rtx_REG (word_mode,
5519 (REGNO (op0) +
5520 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5521 GET_MODE (SUBREG_REG (orig_op0)),
5522 SUBREG_BYTE (orig_op0),
5523 GET_MODE (orig_op0))));
5524 }
5525
5526 if (GET_CODE (op1) == SUBREG)
5527 {
5528 op1 = SUBREG_REG (op1);
5529 code1 = GET_CODE (op1);
5530 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5531 /* ??? Why is this given op1's mode and above for
5532 ??? op0 SUBREGs we use word_mode? */
5533 op1 = gen_rtx_REG (GET_MODE (op1),
5534 (REGNO (op1) +
5535 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5536 GET_MODE (SUBREG_REG (orig_op1)),
5537 SUBREG_BYTE (orig_op1),
5538 GET_MODE (orig_op1))));
5539 }
5540 /* A PLUS in the index register position can be created only as a result of
5541 register rematerialization for expressions like &localvar*4. Reload it.
5542 It may be possible to combine the displacement on the outer level,
5543 but it is probably not worthwhile to do so. */
5544 if (context == 1)
5545 {
5546 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5547 opnum, ADDR_TYPE (type), ind_levels, insn);
5548 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5549 context_reg_class,
5550 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5551 return 1;
5552 }
5553
5554 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5555 || code0 == ZERO_EXTEND || code1 == MEM)
5556 {
5557 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5558 &XEXP (x, 0), opnum, type, ind_levels,
5559 insn);
5560 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5561 &XEXP (x, 1), opnum, type, ind_levels,
5562 insn);
5563 }
5564
5565 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5566 || code1 == ZERO_EXTEND || code0 == MEM)
5567 {
5568 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5569 &XEXP (x, 0), opnum, type, ind_levels,
5570 insn);
5571 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5572 &XEXP (x, 1), opnum, type, ind_levels,
5573 insn);
5574 }
5575
5576 else if (code0 == CONST_INT || code0 == CONST
5577 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5578 find_reloads_address_1 (mode, orig_op1, 0, PLUS, code0,
5579 &XEXP (x, 1), opnum, type, ind_levels,
5580 insn);
5581
5582 else if (code1 == CONST_INT || code1 == CONST
5583 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5584 find_reloads_address_1 (mode, orig_op0, 0, PLUS, code1,
5585 &XEXP (x, 0), opnum, type, ind_levels,
5586 insn);
5587
5588 else if (code0 == REG && code1 == REG)
5589 {
5590 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5591 && regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5592 return 0;
5593 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5594 && regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5595 return 0;
5596 else if (regno_ok_for_base_p (REGNO (op0), mode, PLUS, REG))
5597 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5598 &XEXP (x, 1), opnum, type, ind_levels,
5599 insn);
5600 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5601 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5602 &XEXP (x, 0), opnum, type, ind_levels,
5603 insn);
5604 else if (regno_ok_for_base_p (REGNO (op1), mode, PLUS, REG))
5605 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5606 &XEXP (x, 0), opnum, type, ind_levels,
5607 insn);
5608 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5609 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5610 &XEXP (x, 1), opnum, type, ind_levels,
5611 insn);
5612 else
5613 {
5614 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5618 &XEXP (x, 1), opnum, type, ind_levels,
5619 insn);
5620 }
5621 }
5622
5623 else if (code0 == REG)
5624 {
5625 find_reloads_address_1 (mode, orig_op0, 1, PLUS, SCRATCH,
5626 &XEXP (x, 0), opnum, type, ind_levels,
5627 insn);
5628 find_reloads_address_1 (mode, orig_op1, 0, PLUS, REG,
5629 &XEXP (x, 1), opnum, type, ind_levels,
5630 insn);
5631 }
5632
5633 else if (code1 == REG)
5634 {
5635 find_reloads_address_1 (mode, orig_op1, 1, PLUS, SCRATCH,
5636 &XEXP (x, 1), opnum, type, ind_levels,
5637 insn);
5638 find_reloads_address_1 (mode, orig_op0, 0, PLUS, REG,
5639 &XEXP (x, 0), opnum, type, ind_levels,
5640 insn);
5641 }
5642 }
5643
5644 return 0;
5645
5646 case POST_MODIFY:
5647 case PRE_MODIFY:
5648 {
5649 rtx op0 = XEXP (x, 0);
5650 rtx op1 = XEXP (x, 1);
5651 enum rtx_code index_code;
5652 int regno;
5653 int reloadnum;
5654
5655 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5656 return 0;
5657
5658 /* Currently, we only support {PRE,POST}_MODIFY constructs
5659 where a base register is {inc,dec}remented by the contents
5660 of another register or by a constant value. Thus, these
5661 operands must match. */
5662 gcc_assert (op0 == XEXP (op1, 0));
5663
5664 /* Require index register (or constant). Let's just handle the
5665 register case in the meantime... If the target allows
5666 auto-modify by a constant then we could try replacing a pseudo
5667 register with its equivalent constant where applicable.
5668
5669 We also handle the case where the register was eliminated
5670 resulting in a PLUS subexpression.
5671
5672 If we later decide to reload the whole PRE_MODIFY or
5673 POST_MODIFY, inc_for_reload might clobber the reload register
5674 before reading the index. The index register might therefore
5675 need to live longer than a TYPE reload normally would, so be
5676 conservative and class it as RELOAD_OTHER. */
5677 if ((REG_P (XEXP (op1, 1))
5678 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5679 || GET_CODE (XEXP (op1, 1)) == PLUS)
5680 find_reloads_address_1 (mode, XEXP (op1, 1), 1, code, SCRATCH,
5681 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5682 ind_levels, insn);
5683
5684 gcc_assert (REG_P (XEXP (op1, 0)));
5685
5686 regno = REGNO (XEXP (op1, 0));
5687 index_code = GET_CODE (XEXP (op1, 1));
5688
5689 /* A register that is incremented cannot be constant! */
5690 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5691 || reg_equiv_constant (regno) == 0);
5692
5693 /* Handle a register that is equivalent to a memory location
5694 which cannot be addressed directly. */
5695 if (reg_equiv_memory_loc (regno) != 0
5696 && (reg_equiv_address (regno) != 0
5697 || num_not_at_initial_offset))
5698 {
5699 rtx tem = make_memloc (XEXP (x, 0), regno);
5700
5701 if (reg_equiv_address (regno)
5702 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5703 {
5704 rtx orig = tem;
5705
5706 /* First reload the memory location's address.
5707 We can't use ADDR_TYPE (type) here, because we need to
5708 write back the value after reading it, hence we actually
5709 need two registers. */
5710 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5711 &XEXP (tem, 0), opnum,
5712 RELOAD_OTHER,
5713 ind_levels, insn);
5714
5715 if (!rtx_equal_p (tem, orig))
5716 push_reg_equiv_alt_mem (regno, tem);
5717
5718 /* Then reload the memory location into a base
5719 register. */
5720 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5721 &XEXP (op1, 0),
5722 base_reg_class (mode, code,
5723 index_code),
5724 GET_MODE (x), GET_MODE (x), 0,
5725 0, opnum, RELOAD_OTHER);
5726
5727 update_auto_inc_notes (this_insn, regno, reloadnum);
5728 return 0;
5729 }
5730 }
5731
5732 if (reg_renumber[regno] >= 0)
5733 regno = reg_renumber[regno];
5734
5735 /* We require a base register here... */
5736 if (!regno_ok_for_base_p (regno, GET_MODE (x), code, index_code))
5737 {
5738 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5739 &XEXP (op1, 0), &XEXP (x, 0),
5740 base_reg_class (mode, code, index_code),
5741 GET_MODE (x), GET_MODE (x), 0, 0,
5742 opnum, RELOAD_OTHER);
5743
5744 update_auto_inc_notes (this_insn, regno, reloadnum);
5745 return 0;
5746 }
5747 }
5748 return 0;
5749
5750 case POST_INC:
5751 case POST_DEC:
5752 case PRE_INC:
5753 case PRE_DEC:
5754 if (REG_P (XEXP (x, 0)))
5755 {
5756 int regno = REGNO (XEXP (x, 0));
5757 int value = 0;
5758 rtx x_orig = x;
5759
5760 /* A register that is incremented cannot be constant! */
5761 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5762 || reg_equiv_constant (regno) == 0);
5763
5764 /* Handle a register that is equivalent to a memory location
5765 which cannot be addressed directly. */
5766 if (reg_equiv_memory_loc (regno) != 0
5767 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5768 {
5769 rtx tem = make_memloc (XEXP (x, 0), regno);
5770 if (reg_equiv_address (regno)
5771 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5772 {
5773 rtx orig = tem;
5774
5775 /* First reload the memory location's address.
5776 We can't use ADDR_TYPE (type) here, because we need to
5777 write back the value after reading it, hence we actually
5778 need two registers. */
5779 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5780 &XEXP (tem, 0), opnum, type,
5781 ind_levels, insn);
5782 if (!rtx_equal_p (tem, orig))
5783 push_reg_equiv_alt_mem (regno, tem);
5784 /* Put this inside a new increment-expression. */
5785 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5786 /* Proceed to reload that, as if it contained a register. */
5787 }
5788 }
5789
5790 /* If we have a hard register that is ok in this incdec context,
5791 don't make a reload. If the register isn't nice enough for
5792 autoincdec, we can reload it. But if an autoincrement of a
5793 register that we have just verified as acceptable is still not
5794 "valid" in the surrounding context, then no autoincrement is "valid".
5795 If that is true and something made an autoincrement anyway,
5796 this must be a special context where one is allowed.
5797 (For example, a "push" instruction.)
5798 We can't improve this address, so leave it alone. */
5799
5800 /* Otherwise, reload the autoincrement into a suitable hard reg
5801 and record how much to increment by. */
5802
5803 if (reg_renumber[regno] >= 0)
5804 regno = reg_renumber[regno];
5805 if (regno >= FIRST_PSEUDO_REGISTER
5806 || !REG_OK_FOR_CONTEXT (context, regno, mode, code,
5807 index_code))
5808 {
5809 int reloadnum;
5810
5811 /* If we can output the register afterwards, do so; this
5812 saves the extra update.
5813 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5814 CALL_INSN - and it does not set CC0.
5815 But don't do this if we cannot directly address the
5816 memory location, since this will make it harder to
5817 reuse address reloads and will increase register pressure.
5818 Also don't do this if we can probably update x directly. */
5819 rtx equiv = (MEM_P (XEXP (x, 0))
5820 ? XEXP (x, 0)
5821 : reg_equiv_mem (regno));
5822 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5823 if (insn && NONJUMP_INSN_P (insn) && equiv
5824 && memory_operand (equiv, GET_MODE (equiv))
5825 #ifdef HAVE_cc0
5826 && ! sets_cc0_p (PATTERN (insn))
5827 #endif
5828 && ! (icode != CODE_FOR_nothing
5829 && insn_operand_matches (icode, 0, equiv)
5830 && insn_operand_matches (icode, 1, equiv)))
5831 {
5832 /* We use the original pseudo for loc, so that
5833 emit_reload_insns() knows which pseudo this
5834 reload refers to and updates the pseudo rtx, not
5835 its equivalent memory location, as well as the
5836 corresponding entry in reg_last_reload_reg. */
5837 loc = &XEXP (x_orig, 0);
5838 x = XEXP (x, 0);
5839 reloadnum
5840 = push_reload (x, x, loc, loc,
5841 context_reg_class,
5842 GET_MODE (x), GET_MODE (x), 0, 0,
5843 opnum, RELOAD_OTHER);
5844 }
5845 else
5846 {
5847 reloadnum
5848 = push_reload (x, x, loc, (rtx*) 0,
5849 context_reg_class,
5850 GET_MODE (x), GET_MODE (x), 0, 0,
5851 opnum, type);
5852 rld[reloadnum].inc
5853 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5854
5855 value = 1;
5856 }
5857
5858 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5859 reloadnum);
5860 }
5861 return value;
5862 }
5863 return 0;
5864
5865 case TRUNCATE:
5866 case SIGN_EXTEND:
5867 case ZERO_EXTEND:
5868 /* Look for parts to reload in the inner expression and reload them
5869 too, in addition to this operation. Reloading all inner parts in
5870 addition to this one shouldn't be necessary, but at this point,
5871 we don't know if we can possibly omit any part that *can* be
5872 reloaded. Targets that are better off reloading just either part
5873 (or perhaps even a different part of an outer expression), should
5874 define LEGITIMIZE_RELOAD_ADDRESS. */
5875 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), XEXP (x, 0),
5876 context, code, SCRATCH, &XEXP (x, 0), opnum,
5877 type, ind_levels, insn);
5878 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5879 context_reg_class,
5880 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5881 return 1;
5882
5883 case MEM:
5884 /* This is probably the result of a substitution, by eliminate_regs, of
5885 an equivalent address for a pseudo that was not allocated to a hard
5886 register. Verify that the specified address is valid and reload it
5887 into a register.
5888
5889 Since we know we are going to reload this item, don't decrement for
5890 the indirection level.
5891
5892 Note that this is actually conservative: it would be slightly more
5893 efficient to use the value of SPILL_INDIRECT_LEVELS from
5894 reload1.c here. */
5895
5896 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5897 opnum, ADDR_TYPE (type), ind_levels, insn);
5898 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5899 context_reg_class,
5900 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5901 return 1;
5902
5903 case REG:
5904 {
5905 int regno = REGNO (x);
5906
5907 if (reg_equiv_constant (regno) != 0)
5908 {
5909 find_reloads_address_part (reg_equiv_constant (regno), loc,
5910 context_reg_class,
5911 GET_MODE (x), opnum, type, ind_levels);
5912 return 1;
5913 }
5914
5915 #if 0 /* This might break the code in reload1.c that deletes a prior
5916 output reload feeding this insn. */
5917 if (reg_equiv_mem (regno) != 0)
5918 {
5919 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5920 context_reg_class,
5921 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5922 return 1;
5923 }
5924 #endif
5925
5926 if (reg_equiv_memory_loc (regno)
5927 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5928 {
5929 rtx tem = make_memloc (x, regno);
5930 if (reg_equiv_address (regno) != 0
5931 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5932 {
5933 x = tem;
5934 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5935 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5936 ind_levels, insn);
5937 if (!rtx_equal_p (x, tem))
5938 push_reg_equiv_alt_mem (regno, x);
5939 }
5940 }
5941
5942 if (reg_renumber[regno] >= 0)
5943 regno = reg_renumber[regno];
5944
5945 if (regno >= FIRST_PSEUDO_REGISTER
5946 || !REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5947 index_code))
5948 {
5949 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5950 context_reg_class,
5951 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5952 return 1;
5953 }
5954
5955 /* If a register appearing in an address is the subject of a CLOBBER
5956 in this insn, reload it into some other register to be safe.
5957 The CLOBBER is supposed to make the register unavailable
5958 from before this insn to after it. */
5959 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5960 {
5961 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5962 context_reg_class,
5963 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5964 return 1;
5965 }
5966 }
5967 return 0;
5968
5969 case SUBREG:
5970 if (REG_P (SUBREG_REG (x)))
5971 {
5972 /* If this is a SUBREG of a hard register and the resulting register
5973 is of the wrong class, reload the whole SUBREG. This avoids
5974 needless copies if SUBREG_REG is multi-word. */
5975 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5976 {
5977 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5978
5979 if (!REG_OK_FOR_CONTEXT (context, regno, mode, outer_code,
5980 index_code))
5981 {
5982 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5983 context_reg_class,
5984 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5985 return 1;
5986 }
5987 }
5988 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5989 is larger than the class size, then reload the whole SUBREG. */
5990 else
5991 {
5992 enum reg_class rclass = context_reg_class;
5993 if ((unsigned) CLASS_MAX_NREGS (rclass, GET_MODE (SUBREG_REG (x)))
5994 > reg_class_size[rclass])
5995 {
5996 x = find_reloads_subreg_address (x, 0, opnum,
5997 ADDR_TYPE (type),
5998 ind_levels, insn, NULL);
5999 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6000 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6001 return 1;
6002 }
6003 }
6004 }
6005 break;
6006
6007 default:
6008 break;
6009 }
6010
6011 {
6012 const char *fmt = GET_RTX_FORMAT (code);
6013 int i;
6014
6015 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6016 {
6017 if (fmt[i] == 'e')
6018 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6019 we get here. */
6020 find_reloads_address_1 (mode, XEXP (x, i), context, code, SCRATCH,
6021 &XEXP (x, i), opnum, type, ind_levels, insn);
6022 }
6023 }
6024
6025 #undef REG_OK_FOR_CONTEXT
6026 return 0;
6027 }
6028 \f
6029 /* X, which is found at *LOC, is a part of an address that needs to be
6030 reloaded into a register of class RCLASS. If X is a constant, or if
6031 X is a PLUS that contains a constant, check that the constant is a
6032 legitimate operand and that we are supposed to be able to load
6033 it into the register.
6034
6035 If not, force the constant into memory and reload the MEM instead.
6036
6037 MODE is the mode to use, in case X is an integer constant.
6038
6039 OPNUM and TYPE describe the purpose of any reloads made.
6040
6041 IND_LEVELS says how many levels of indirect addressing this machine
6042 supports. */
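/* For example, if X is a constant that the target cannot load directly
   (LEGITIMATE_CONSTANT_P is false for it, or its preferred reload class
   is NO_REGS), it is placed in the constant pool with force_const_mem and
   the resulting MEM, rather than the constant itself, is reloaded into
   RCLASS.  */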
6043
6044 static void
6045 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6046 enum machine_mode mode, int opnum,
6047 enum reload_type type, int ind_levels)
6048 {
6049 if (CONSTANT_P (x)
6050 && (! LEGITIMATE_CONSTANT_P (x)
6051 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6052 {
6053 x = force_const_mem (mode, x);
6054 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6055 opnum, type, ind_levels, 0);
6056 }
6057
6058 else if (GET_CODE (x) == PLUS
6059 && CONSTANT_P (XEXP (x, 1))
6060 && (! LEGITIMATE_CONSTANT_P (XEXP (x, 1))
6061 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6062 == NO_REGS))
6063 {
6064 rtx tem;
6065
6066 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6067 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6068 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6069 opnum, type, ind_levels, 0);
6070 }
6071
6072 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6073 mode, VOIDmode, 0, 0, opnum, type);
6074 }
6075 \f
6076 /* X, a subreg of a pseudo, is a part of an address that needs to be
6077 reloaded.
6078
6079 If the pseudo is equivalent to a memory location that cannot be directly
6080 addressed, make the necessary address reloads.
6081
6082 If address reloads have been necessary, or if the address is changed
6083 by register elimination, return the rtx of the memory location;
6084 otherwise, return X.
6085
6086 If FORCE_REPLACE is nonzero, unconditionally replace the subreg with the
6087 memory location.
6088
6089 OPNUM and TYPE identify the purpose of the reload.
6090
6091 IND_LEVELS says how many levels of indirect addressing are
6092 supported at this point in the address.
6093
6094 INSN, if nonzero, is the insn in which we do the reload. It is used
6095 to determine where to put USEs for pseudos that we have to replace with
6096 stack slots. */
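/* Illustrative case (hypothetical numbers): if (reg 65) is equivalent to
   the stack slot (mem:SI (plus (reg fp) (const_int -16))), then, when a
   replacement is called for, X = (subreg:QI (reg:SI 65) 3) is narrowed to
   (mem:QI (plus (reg fp) (const_int -13))), and that MEM's address is
   validated by find_reloads_address.  Big-endian paradoxical subregs use
   a negative offset instead, as the code below explains.  */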
6097
6098 static rtx
6099 find_reloads_subreg_address (rtx x, int force_replace, int opnum,
6100 enum reload_type type, int ind_levels, rtx insn,
6101 int *address_reloaded)
6102 {
6103 int regno = REGNO (SUBREG_REG (x));
6104 int reloaded = 0;
6105
6106 if (reg_equiv_memory_loc (regno))
6107 {
6108 /* If the address is not directly addressable, or if the address is not
6109 offsettable, then it must be replaced. */
6110 if (! force_replace
6111 && (reg_equiv_address (regno)
6112 || ! offsettable_memref_p (reg_equiv_mem (regno))))
6113 force_replace = 1;
6114
6115 if (force_replace || num_not_at_initial_offset)
6116 {
6117 rtx tem = make_memloc (SUBREG_REG (x), regno);
6118
6119 /* If the address changes because of register elimination, then
6120 it must be replaced. */
6121 if (force_replace
6122 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
6123 {
6124 unsigned outer_size = GET_MODE_SIZE (GET_MODE (x));
6125 unsigned inner_size = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
6126 int offset;
6127 rtx orig = tem;
6128
6129 /* For big-endian paradoxical subregs, SUBREG_BYTE does not
6130 hold the correct (negative) byte offset. */
6131 if (BYTES_BIG_ENDIAN && outer_size > inner_size)
6132 offset = inner_size - outer_size;
6133 else
6134 offset = SUBREG_BYTE (x);
6135
6136 XEXP (tem, 0) = plus_constant (XEXP (tem, 0), offset);
6137 PUT_MODE (tem, GET_MODE (x));
6138 if (MEM_OFFSET (tem))
6139 set_mem_offset (tem, plus_constant (MEM_OFFSET (tem), offset));
6140 if (MEM_SIZE (tem)
6141 && INTVAL (MEM_SIZE (tem)) != (HOST_WIDE_INT) outer_size)
6142 set_mem_size (tem, GEN_INT (outer_size));
6143
6144 /* If this was a paradoxical subreg that we replaced, the
6145 resulting memory must be sufficiently aligned to allow
6146 us to widen the mode of the memory. */
6147 if (outer_size > inner_size)
6148 {
6149 rtx base;
6150
6151 base = XEXP (tem, 0);
6152 if (GET_CODE (base) == PLUS)
6153 {
6154 if (CONST_INT_P (XEXP (base, 1))
6155 && INTVAL (XEXP (base, 1)) % outer_size != 0)
6156 return x;
6157 base = XEXP (base, 0);
6158 }
6159 if (!REG_P (base)
6160 || (REGNO_POINTER_ALIGN (REGNO (base))
6161 < outer_size * BITS_PER_UNIT))
6162 return x;
6163 }
6164
6165 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6166 XEXP (tem, 0), &XEXP (tem, 0),
6167 opnum, type, ind_levels, insn);
6168 /* ??? Do we need to handle nonzero offsets somehow? */
6169 if (!offset && !rtx_equal_p (tem, orig))
6170 push_reg_equiv_alt_mem (regno, tem);
6171
6172 /* For some processors an address may be valid in the
6173 original mode but not in a smaller mode. For
6174 example, ARM accepts a scaled index register in
6175 SImode but not in HImode. Note that this is only
6176 a problem if the address in reg_equiv_mem is already
6177 invalid in the new mode; other cases would be fixed
6178 by find_reloads_address as usual.
6179
6180 ??? We attempt to handle such cases here by doing an
6181 additional reload of the full address after the
6182 usual processing by find_reloads_address. Note that
6183 this may not work in the general case, but it seems
6184 to cover the cases where this situation currently
6185 occurs. A more general fix might be to reload the
6186 *value* instead of the address, but this would not
6187 be expected by the callers of this routine as-is.
6188
6189 If find_reloads_address has already completely replaced
6190 the address, there is nothing further to do. */
6191 if (reloaded == 0
6192 && reg_equiv_mem (regno) != 0
6193 && !strict_memory_address_addr_space_p
6194 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6195 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6196 {
6197 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6198 base_reg_class (GET_MODE (tem), MEM, SCRATCH),
6199 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0,
6200 opnum, type);
6201 reloaded = 1;
6202 }
6203 /* If this is not a toplevel operand, find_reloads doesn't see
6204 this substitution. We have to emit a USE of the pseudo so
6205 that delete_output_reload can see it. */
6206 if (replace_reloads && recog_data.operand[opnum] != x)
6207 /* We mark the USE with QImode so that we recognize it
6208 as one that can be safely deleted at the end of
6209 reload. */
6210 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode,
6211 SUBREG_REG (x)),
6212 insn), QImode);
6213 x = tem;
6214 }
6215 }
6216 }
6217 if (reloaded && address_reloaded)
6218 *address_reloaded = 1;
6219
6220 return x;
6221 }
6222 \f
6223 /* Substitute into the current INSN the registers into which we have reloaded
6224 the things that need reloading. The array `replacements'
6225 contains the locations of all pointers that must be changed
6226 and says what to replace them with.
6227
6228 The replacements are applied in place. */
6229
6230 void
6231 subst_reloads (rtx insn)
6232 {
6233 int i;
6234
6235 for (i = 0; i < n_replacements; i++)
6236 {
6237 struct replacement *r = &replacements[i];
6238 rtx reloadreg = rld[r->what].reg_rtx;
6239 if (reloadreg)
6240 {
6241 #ifdef DEBUG_RELOAD
6242 /* This checking takes a very long time on some platforms,
6243 causing the gcc.c-torture/compile/limits-fnargs.c test
6244 to time out during testing. See PR 31850.
6245
6246 Internal consistency test. Check that we don't modify
6247 anything in the equivalence arrays. Whenever something from
6248 those arrays needs to be reloaded, it must be unshared before
6249 being substituted into; the equivalence must not be modified.
6250 Otherwise, if the equivalence is used after that, it will
6251 have been modified, and the thing substituted (probably a
6252 register) is likely overwritten and not a usable equivalence. */
6253 int check_regno;
6254
6255 for (check_regno = 0; check_regno < max_regno; check_regno++)
6256 {
6257 #define CHECK_MODF(ARRAY) \
6258 gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
6259 || !loc_mentioned_in_p (r->where, \
6260 VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
6261
6262 CHECK_MODF (equiv_constant);
6263 CHECK_MODF (equiv_memory_loc);
6264 CHECK_MODF (equiv_address);
6265 CHECK_MODF (equiv_mem);
6266 #undef CHECK_MODF
6267 }
6268 #endif /* DEBUG_RELOAD */
6269
6270 /* If we're replacing a LABEL_REF with a register, there must
6271 already be an indication (to e.g. flow) of which label this
6272 register refers to. */
6273 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6274 || !JUMP_P (insn)
6275 || find_reg_note (insn,
6276 REG_LABEL_OPERAND,
6277 XEXP (*r->where, 0))
6278 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6279
6280 /* Encapsulate RELOADREG so its machine mode matches what
6281 used to be there. Note that gen_lowpart_common will
6282 do the wrong thing if RELOADREG is multi-word. RELOADREG
6283 will always be a REG here. */
6284 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6285 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6286
6287 /* If we are putting this into a SUBREG and RELOADREG is a
6288 SUBREG, we would be making nested SUBREGs, so we have to fix
6289 this up. Note that r->where == &SUBREG_REG (*r->subreg_loc). */
6290
6291 if (r->subreg_loc != 0 && GET_CODE (reloadreg) == SUBREG)
6292 {
6293 if (GET_MODE (*r->subreg_loc)
6294 == GET_MODE (SUBREG_REG (reloadreg)))
6295 *r->subreg_loc = SUBREG_REG (reloadreg);
6296 else
6297 {
6298 int final_offset =
6299 SUBREG_BYTE (*r->subreg_loc) + SUBREG_BYTE (reloadreg);
6300
6301 /* When working with SUBREGs the rule is that the byte
6302 offset must be a multiple of the size of the SUBREG's mode.
6303 final_offset = (final_offset /
6304 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
6305 final_offset = (final_offset *
6306 GET_MODE_SIZE (GET_MODE (*r->subreg_loc)));
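/* For example, with a 4-byte mode and final_offset == 6, the division
   and multiplication above round the offset down to 4, i.e. to the
   start of the containing word.  */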
6307
6308 *r->where = SUBREG_REG (reloadreg);
6309 SUBREG_BYTE (*r->subreg_loc) = final_offset;
6310 }
6311 }
6312 else
6313 *r->where = reloadreg;
6314 }
6315 /* If reload got no reg and isn't optional, something's wrong. */
6316 else
6317 gcc_assert (rld[r->what].optional);
6318 }
6319 }
6320 \f
6321 /* Make a copy of any replacements being done into X and move those
6322 copies to locations in Y, a copy of X. */
6323
6324 void
6325 copy_replacements (rtx x, rtx y)
6326 {
6327 /* We can't support X being a SUBREG because we might then need to know its
6328 location if something inside it was replaced. */
6329 gcc_assert (GET_CODE (x) != SUBREG);
6330
6331 copy_replacements_1 (&x, &y, n_replacements);
6332 }
6333
6334 static void
6335 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6336 {
6337 int i, j;
6338 rtx x, y;
6339 struct replacement *r;
6340 enum rtx_code code;
6341 const char *fmt;
6342
6343 for (j = 0; j < orig_replacements; j++)
6344 {
6345 if (replacements[j].subreg_loc == px)
6346 {
6347 r = &replacements[n_replacements++];
6348 r->where = replacements[j].where;
6349 r->subreg_loc = py;
6350 r->what = replacements[j].what;
6351 r->mode = replacements[j].mode;
6352 }
6353 else if (replacements[j].where == px)
6354 {
6355 r = &replacements[n_replacements++];
6356 r->where = py;
6357 r->subreg_loc = 0;
6358 r->what = replacements[j].what;
6359 r->mode = replacements[j].mode;
6360 }
6361 }
6362
6363 x = *px;
6364 y = *py;
6365 code = GET_CODE (x);
6366 fmt = GET_RTX_FORMAT (code);
6367
6368 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6369 {
6370 if (fmt[i] == 'e')
6371 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6372 else if (fmt[i] == 'E')
6373 for (j = XVECLEN (x, i); --j >= 0; )
6374 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6375 orig_replacements);
6376 }
6377 }
6378
6379 /* Change any replacements being done to *X to be done to *Y. */
6380
6381 void
6382 move_replacements (rtx *x, rtx *y)
6383 {
6384 int i;
6385
6386 for (i = 0; i < n_replacements; i++)
6387 if (replacements[i].subreg_loc == x)
6388 replacements[i].subreg_loc = y;
6389 else if (replacements[i].where == x)
6390 {
6391 replacements[i].where = y;
6392 replacements[i].subreg_loc = 0;
6393 }
6394 }
6395 \f
6396 /* If LOC was scheduled to be replaced by something, return the replacement.
6397 Otherwise, return *LOC. */
6398
6399 rtx
6400 find_replacement (rtx *loc)
6401 {
6402 struct replacement *r;
6403
6404 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6405 {
6406 rtx reloadreg = rld[r->what].reg_rtx;
6407
6408 if (reloadreg && r->where == loc)
6409 {
6410 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6411 reloadreg = gen_rtx_REG (r->mode, REGNO (reloadreg));
6412
6413 return reloadreg;
6414 }
6415 else if (reloadreg && r->subreg_loc == loc)
6416 {
6417 /* RELOADREG must be either a REG or a SUBREG.
6418
6419 ??? Is it actually still ever a SUBREG? If so, why? */
6420
6421 if (REG_P (reloadreg))
6422 return gen_rtx_REG (GET_MODE (*loc),
6423 (REGNO (reloadreg) +
6424 subreg_regno_offset (REGNO (SUBREG_REG (*loc)),
6425 GET_MODE (SUBREG_REG (*loc)),
6426 SUBREG_BYTE (*loc),
6427 GET_MODE (*loc))));
6428 else if (GET_MODE (reloadreg) == GET_MODE (*loc))
6429 return reloadreg;
6430 else
6431 {
6432 int final_offset = SUBREG_BYTE (reloadreg) + SUBREG_BYTE (*loc);
6433
6434 /* When working with SUBREGs the rule is that the byte
6435 offset must be a multiple of the size of the SUBREG's mode. */
6436 final_offset = (final_offset / GET_MODE_SIZE (GET_MODE (*loc)));
6437 final_offset = (final_offset * GET_MODE_SIZE (GET_MODE (*loc)));
6438 return gen_rtx_SUBREG (GET_MODE (*loc), SUBREG_REG (reloadreg),
6439 final_offset);
6440 }
6441 }
6442 }
6443
6444 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6445 what's inside, and build a new rtx if so. */
6446 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6447 || GET_CODE (*loc) == MULT)
6448 {
6449 rtx x = find_replacement (&XEXP (*loc, 0));
6450 rtx y = find_replacement (&XEXP (*loc, 1));
6451
6452 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6453 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6454 }
6455
6456 return *loc;
6457 }
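
/* Illustrative sketch (not from the original sources): code that needs to
   reason about the form an operand will have once subst_reloads has run can
   query each interesting location, e.g. for a hypothetical MEM operand OP:

	rtx addr = find_replacement (&XEXP (op, 0));

   ADDR is the reload register scheduled for that location if there is one,
   otherwise the original address; for a PLUS, MINUS or MULT address a fresh
   rtx is built from the replacements of its two arms, as the code above
   shows.  */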
6458 \f
6459 /* Return nonzero if any register in the range [REGNO, ENDREGNO)
6460 appears either explicitly or implicitly in X
6461 other than being stored into (except for earlyclobber operands).
6462
6463 References contained within the substructure at LOC do not count.
6464 LOC may be zero, meaning don't ignore anything.
6465
6466 This is similar to refers_to_regno_p in rtlanal.c except that we
6467 look at equivalences for pseudos that didn't get hard registers. */
6468
6469 static int
6470 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6471 rtx x, rtx *loc)
6472 {
6473 int i;
6474 unsigned int r;
6475 RTX_CODE code;
6476 const char *fmt;
6477
6478 if (x == 0)
6479 return 0;
6480
6481 repeat:
6482 code = GET_CODE (x);
6483
6484 switch (code)
6485 {
6486 case REG:
6487 r = REGNO (x);
6488
6489 /* If this is a pseudo, a hard register must not have been allocated.
6490 X must therefore either be a constant or be in memory. */
6491 if (r >= FIRST_PSEUDO_REGISTER)
6492 {
6493 if (reg_equiv_memory_loc (r))
6494 return refers_to_regno_for_reload_p (regno, endregno,
6495 reg_equiv_memory_loc (r),
6496 (rtx*) 0);
6497
6498 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6499 return 0;
6500 }
6501
6502 return (endregno > r
6503 && regno < r + (r < FIRST_PSEUDO_REGISTER
6504 ? hard_regno_nregs[r][GET_MODE (x)]
6505 : 1));
6506
6507 case SUBREG:
6508 /* If this is a SUBREG of a hard reg, we can see exactly which
6509 registers are being modified. Otherwise, handle normally. */
6510 if (REG_P (SUBREG_REG (x))
6511 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6512 {
6513 unsigned int inner_regno = subreg_regno (x);
6514 unsigned int inner_endregno
6515 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6516 ? subreg_nregs (x) : 1);
6517
6518 return endregno > inner_regno && regno < inner_endregno;
6519 }
6520 break;
6521
6522 case CLOBBER:
6523 case SET:
6524 if (&SET_DEST (x) != loc
6525 /* Note that setting a SUBREG counts as referring to the REG it is in
6526 when that REG is a pseudo, but not when it is a hard register,
6527 since we can treat each word of a hard register individually. */
6528 && ((GET_CODE (SET_DEST (x)) == SUBREG
6529 && loc != &SUBREG_REG (SET_DEST (x))
6530 && REG_P (SUBREG_REG (SET_DEST (x)))
6531 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6532 && refers_to_regno_for_reload_p (regno, endregno,
6533 SUBREG_REG (SET_DEST (x)),
6534 loc))
6535 /* If the output is an earlyclobber operand, this is
6536 a conflict. */
6537 || ((!REG_P (SET_DEST (x))
6538 || earlyclobber_operand_p (SET_DEST (x)))
6539 && refers_to_regno_for_reload_p (regno, endregno,
6540 SET_DEST (x), loc))))
6541 return 1;
6542
6543 if (code == CLOBBER || loc == &SET_SRC (x))
6544 return 0;
6545 x = SET_SRC (x);
6546 goto repeat;
6547
6548 default:
6549 break;
6550 }
6551
6552 /* X does not match, so try its subexpressions. */
6553
6554 fmt = GET_RTX_FORMAT (code);
6555 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6556 {
6557 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6558 {
6559 if (i == 0)
6560 {
6561 x = XEXP (x, 0);
6562 goto repeat;
6563 }
6564 else
6565 if (refers_to_regno_for_reload_p (regno, endregno,
6566 XEXP (x, i), loc))
6567 return 1;
6568 }
6569 else if (fmt[i] == 'E')
6570 {
6571 int j;
6572 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6573 if (loc != &XVECEXP (x, i, j)
6574 && refers_to_regno_for_reload_p (regno, endregno,
6575 XVECEXP (x, i, j), loc))
6576 return 1;
6577 }
6578 }
6579 return 0;
6580 }
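
/* Example of the semantics above (illustrative, not from the original
   sources).  For a pattern PAT of the form

	(set (reg:SI 3) (reg:SI 4))

   refers_to_regno_for_reload_p (3, 4, PAT, (rtx *) 0) returns 0, because
   hard reg 3 is only stored into (assuming it was not recorded as an
   earlyclobber operand), whereas refers_to_regno_for_reload_p (4, 5, PAT,
   (rtx *) 0) returns 1 because hard reg 4 is used as the source.  A pseudo
   that did not get a hard register is looked through to its memory or
   constant equivalence, which is what distinguishes this function from
   refers_to_regno_p.  */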
6581
6582 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6583 we check if any register number in X conflicts with the relevant register
6584 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6585 contains a MEM (we don't bother checking for memory addresses that can't
6586 conflict because we expect this to be a rare case).
6587
6588 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6589 that we look at equivalences for pseudos that didn't get hard registers. */
6590
6591 int
6592 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6593 {
6594 int regno, endregno;
6595
6596 /* Overly conservative. */
6597 if (GET_CODE (x) == STRICT_LOW_PART
6598 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6599 x = XEXP (x, 0);
6600
6601 /* If either argument is a constant, then modifying X cannot affect IN. */
6602 if (CONSTANT_P (x) || CONSTANT_P (in))
6603 return 0;
6604 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6605 return refers_to_mem_for_reload_p (in);
6606 else if (GET_CODE (x) == SUBREG)
6607 {
6608 regno = REGNO (SUBREG_REG (x));
6609 if (regno < FIRST_PSEUDO_REGISTER)
6610 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6611 GET_MODE (SUBREG_REG (x)),
6612 SUBREG_BYTE (x),
6613 GET_MODE (x));
6614 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6615 ? subreg_nregs (x) : 1);
6616
6617 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6618 }
6619 else if (REG_P (x))
6620 {
6621 regno = REGNO (x);
6622
6623 /* If this is a pseudo, it must not have been assigned a hard register.
6624 Therefore, it must either be in memory or be a constant. */
6625
6626 if (regno >= FIRST_PSEUDO_REGISTER)
6627 {
6628 if (reg_equiv_memory_loc (regno))
6629 return refers_to_mem_for_reload_p (in);
6630 gcc_assert (reg_equiv_constant (regno));
6631 return 0;
6632 }
6633
6634 endregno = END_HARD_REGNO (x);
6635
6636 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6637 }
6638 else if (MEM_P (x))
6639 return refers_to_mem_for_reload_p (in);
6640 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6641 || GET_CODE (x) == CC0)
6642 return reg_mentioned_p (x, in);
6643 else
6644 {
6645 gcc_assert (GET_CODE (x) == PLUS);
6646
6647 /* We actually want to know if X is mentioned somewhere inside IN.
6648 We must not say that (plus (sp) (const_int 124)) is in
6649 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6650 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6651 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6652 while (MEM_P (in))
6653 in = XEXP (in, 0);
6654 if (REG_P (in))
6655 return 0;
6656 else if (GET_CODE (in) == PLUS)
6657 return (rtx_equal_p (x, in)
6658 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6659 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6660 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6661 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6662 }
6663
6664 gcc_unreachable ();
6665 }
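
/* Illustration of the pseudo handling above (a sketch, not from the
   original sources).  If pseudo 200 did not get a hard register but has a
   memory equivalence recorded, then

	reg_overlap_mentioned_for_reload_p (regno_reg_rtx[200], in)

   reduces to refers_to_mem_for_reload_p (in): a store to the pseudo's
   stack slot may conflict with any memory reference inside IN, so any MEM
   found in IN counts as an overlap.  The pseudo number is hypothetical;
   regno_reg_rtx is the usual regno-to-rtx map.  */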
6666
6667 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6668 registers whose equivalent value is in memory. */
6669
6670 static int
6671 refers_to_mem_for_reload_p (rtx x)
6672 {
6673 const char *fmt;
6674 int i;
6675
6676 if (MEM_P (x))
6677 return 1;
6678
6679 if (REG_P (x))
6680 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6681 && reg_equiv_memory_loc (REGNO (x)));
6682
6683 fmt = GET_RTX_FORMAT (GET_CODE (x));
6684 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6685 if (fmt[i] == 'e'
6686 && (MEM_P (XEXP (x, i))
6687 || refers_to_mem_for_reload_p (XEXP (x, i))))
6688 return 1;
6689
6690 return 0;
6691 }
6692 \f
6693 /* Check the insns before INSN to see if there is a suitable register
6694 containing the same value as GOAL.
6695 If OTHER is -1, look for a register in class RCLASS.
6696 Otherwise, just see if register number OTHER shares GOAL's value.
6697
6698 Return an rtx for the register found, or zero if none is found.
6699
6700 If RELOAD_REG_P is (short *)1,
6701 we reject any hard reg that appears in reload_reg_rtx
6702 because such a hard reg is also needed coming into this insn.
6703
6704 If RELOAD_REG_P is any other nonzero value,
6705 it is a vector indexed by hard reg number
6706 and we reject any hard reg whose element in the vector is nonnegative
6707 as well as any that appears in reload_reg_rtx.
6708
6709 If GOAL is zero, then GOALREG is a register number; we look
6710 for an equivalent for that register.
6711
6712 MODE is the machine mode of the value we want an equivalence for.
6713 If GOAL is nonzero and its mode is not VOIDmode, then it must have mode MODE.
6714
6715 This function is used by jump.c as well as in the reload pass.
6716
6717 If GOAL is the sum of the stack pointer and a constant, we treat it
6718 as if it were a constant except that sp is required to be unchanging. */
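
/* For illustration (not part of the original comment), a typical query
   looks like

	rtx equiv = find_equiv_reg (goal, insn, GENERAL_REGS, -1,
				    (short *) 1, 0, GET_MODE (goal));

   which scans backwards from INSN for an insn such as
   (set (reg:SI 2) GOAL) or (set GOAL (reg:SI 2)) and returns (reg:SI 2)
   if hard reg 2 is in GENERAL_REGS and both it and GOAL are provably
   unchanged between that insn and INSN.  The register number is
   hypothetical.  */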
6719
6720 rtx
6721 find_equiv_reg (rtx goal, rtx insn, enum reg_class rclass, int other,
6722 short *reload_reg_p, int goalreg, enum machine_mode mode)
6723 {
6724 rtx p = insn;
6725 rtx goaltry, valtry, value, where;
6726 rtx pat;
6727 int regno = -1;
6728 int valueno;
6729 int goal_mem = 0;
6730 int goal_const = 0;
6731 int goal_mem_addr_varies = 0;
6732 int need_stable_sp = 0;
6733 int nregs;
6734 int valuenregs;
6735 int num = 0;
6736
6737 if (goal == 0)
6738 regno = goalreg;
6739 else if (REG_P (goal))
6740 regno = REGNO (goal);
6741 else if (MEM_P (goal))
6742 {
6743 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6744 if (MEM_VOLATILE_P (goal))
6745 return 0;
6746 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6747 return 0;
6748 /* An address with side effects must be reexecuted. */
6749 switch (code)
6750 {
6751 case POST_INC:
6752 case PRE_INC:
6753 case POST_DEC:
6754 case PRE_DEC:
6755 case POST_MODIFY:
6756 case PRE_MODIFY:
6757 return 0;
6758 default:
6759 break;
6760 }
6761 goal_mem = 1;
6762 }
6763 else if (CONSTANT_P (goal))
6764 goal_const = 1;
6765 else if (GET_CODE (goal) == PLUS
6766 && XEXP (goal, 0) == stack_pointer_rtx
6767 && CONSTANT_P (XEXP (goal, 1)))
6768 goal_const = need_stable_sp = 1;
6769 else if (GET_CODE (goal) == PLUS
6770 && XEXP (goal, 0) == frame_pointer_rtx
6771 && CONSTANT_P (XEXP (goal, 1)))
6772 goal_const = 1;
6773 else
6774 return 0;
6775
6776 num = 0;
6777 /* Scan insns back from INSN, looking for one that copies
6778 a value into or out of GOAL.
6779 Stop and give up if we reach a label. */
6780
6781 while (1)
6782 {
6783 p = PREV_INSN (p);
6784 if (p && DEBUG_INSN_P (p))
6785 continue;
6786 num++;
6787 if (p == 0 || LABEL_P (p)
6788 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6789 return 0;
6790
6791 if (NONJUMP_INSN_P (p)
6792 /* If we don't want spill regs ... */
6793 && (! (reload_reg_p != 0
6794 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6795 /* ... then ignore insns introduced by reload; they aren't
6796 useful and can cause reload_as_needed to compute results
6797 different from those computed when the need for spills was
6798 calculated. If we notice an input-reload insn here, we will
6799 reject it below, but it might hide a usable equivalent.
6800 That makes bad code. It may even fail: perhaps no reg was
6801 spilled for this insn because it was assumed we would find
6802 that equivalent. */
6803 || INSN_UID (p) < reload_first_uid))
6804 {
6805 rtx tem;
6806 pat = single_set (p);
6807
6808 /* First check for something that sets some reg equal to GOAL. */
6809 if (pat != 0
6810 && ((regno >= 0
6811 && true_regnum (SET_SRC (pat)) == regno
6812 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6813 ||
6814 (regno >= 0
6815 && true_regnum (SET_DEST (pat)) == regno
6816 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6817 ||
6818 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6819 /* When looking for stack pointer + const,
6820 make sure we don't use a stack adjust. */
6821 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6822 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6823 || (goal_mem
6824 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6825 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6826 || (goal_mem
6827 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6828 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6829 /* If we are looking for a constant,
6830 and something equivalent to that constant was copied
6831 into a reg, we can use that reg. */
6832 || (goal_const && REG_NOTES (p) != 0
6833 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6834 && ((rtx_equal_p (XEXP (tem, 0), goal)
6835 && (valueno
6836 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6837 || (REG_P (SET_DEST (pat))
6838 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6839 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6840 && CONST_INT_P (goal)
6841 && 0 != (goaltry
6842 = operand_subword (XEXP (tem, 0), 0, 0,
6843 VOIDmode))
6844 && rtx_equal_p (goal, goaltry)
6845 && (valtry
6846 = operand_subword (SET_DEST (pat), 0, 0,
6847 VOIDmode))
6848 && (valueno = true_regnum (valtry)) >= 0)))
6849 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6850 NULL_RTX))
6851 && REG_P (SET_DEST (pat))
6852 && GET_CODE (XEXP (tem, 0)) == CONST_DOUBLE
6853 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6854 && CONST_INT_P (goal)
6855 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6856 VOIDmode))
6857 && rtx_equal_p (goal, goaltry)
6858 && (valtry
6859 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6860 && (valueno = true_regnum (valtry)) >= 0)))
6861 {
6862 if (other >= 0)
6863 {
6864 if (valueno != other)
6865 continue;
6866 }
6867 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6868 continue;
6869 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6870 mode, valueno))
6871 continue;
6872 value = valtry;
6873 where = p;
6874 break;
6875 }
6876 }
6877 }
6878
6879 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6880 (or copying VALUE into GOAL, if GOAL is also a register).
6881 Now verify that VALUE is really valid. */
6882
6883 /* VALUENO is the register number of VALUE; a hard register. */
6884
6885 /* Don't try to re-use something that is killed in this insn. We want
6886 to be able to trust REG_UNUSED notes. */
6887 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6888 return 0;
6889
6890 /* If we propose to get the value from the stack pointer or if GOAL is
6891 a MEM based on the stack pointer, we need a stable SP. */
6892 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6893 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6894 goal)))
6895 need_stable_sp = 1;
6896
6897 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6898 if (GET_MODE (value) != mode)
6899 return 0;
6900
6901 /* Reject VALUE if it was loaded from GOAL
6902 and is also a register that appears in the address of GOAL. */
6903
6904 if (goal_mem && value == SET_DEST (single_set (where))
6905 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6906 goal, (rtx*) 0))
6907 return 0;
6908
6909 /* Reject registers that overlap GOAL. */
6910
6911 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6912 nregs = hard_regno_nregs[regno][mode];
6913 else
6914 nregs = 1;
6915 valuenregs = hard_regno_nregs[valueno][mode];
6916
6917 if (!goal_mem && !goal_const
6918 && regno + nregs > valueno && regno < valueno + valuenregs)
6919 return 0;
6920
6921 /* Reject VALUE if it is one of the regs reserved for reloads.
6922 Reload1 knows how to reuse them anyway, and it would get
6923 confused if we allocated one without its knowledge.
6924 (Now that insns introduced by reload are ignored above,
6925 this case shouldn't happen, but I'm not positive.) */
6926
6927 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6928 {
6929 int i;
6930 for (i = 0; i < valuenregs; ++i)
6931 if (reload_reg_p[valueno + i] >= 0)
6932 return 0;
6933 }
6934
6935 /* Reject VALUE if it is a register being used for an input reload
6936 even if it is not one of those reserved. */
6937
6938 if (reload_reg_p != 0)
6939 {
6940 int i;
6941 for (i = 0; i < n_reloads; i++)
6942 if (rld[i].reg_rtx != 0 && rld[i].in)
6943 {
6944 int regno1 = REGNO (rld[i].reg_rtx);
6945 int nregs1 = hard_regno_nregs[regno1]
6946 [GET_MODE (rld[i].reg_rtx)];
6947 if (regno1 < valueno + valuenregs
6948 && regno1 + nregs1 > valueno)
6949 return 0;
6950 }
6951 }
6952
6953 if (goal_mem)
6954 /* We must treat the frame pointer as varying here,
6955 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6956 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6957
6958 /* Now verify that the values of GOAL and VALUE remain unaltered
6959 until INSN is reached. */
6960
6961 p = insn;
6962 while (1)
6963 {
6964 p = PREV_INSN (p);
6965 if (p == where)
6966 return value;
6967
6968 /* Don't trust the equivalence past a function call
6969 if either of the two values is in a call-clobbered register or in memory. */
6970 if (CALL_P (p))
6971 {
6972 int i;
6973
6974 if (goal_mem || need_stable_sp)
6975 return 0;
6976
6977 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6978 for (i = 0; i < nregs; ++i)
6979 if (call_used_regs[regno + i]
6980 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6981 return 0;
6982
6983 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6984 for (i = 0; i < valuenregs; ++i)
6985 if (call_used_regs[valueno + i]
6986 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6987 return 0;
6988 }
6989
6990 if (INSN_P (p))
6991 {
6992 pat = PATTERN (p);
6993
6994 /* Watch out for unspec_volatile, and volatile asms. */
6995 if (volatile_insn_p (pat))
6996 return 0;
6997
6998 /* If this insn P stores in either GOAL or VALUE, return 0.
6999 If GOAL is a memory ref and this insn writes memory, return 0.
7000 If GOAL is a memory ref and its address is not constant,
7001 and this insn P changes a register used in GOAL, return 0. */
7002
7003 if (GET_CODE (pat) == COND_EXEC)
7004 pat = COND_EXEC_CODE (pat);
7005 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
7006 {
7007 rtx dest = SET_DEST (pat);
7008 while (GET_CODE (dest) == SUBREG
7009 || GET_CODE (dest) == ZERO_EXTRACT
7010 || GET_CODE (dest) == STRICT_LOW_PART)
7011 dest = XEXP (dest, 0);
7012 if (REG_P (dest))
7013 {
7014 int xregno = REGNO (dest);
7015 int xnregs;
7016 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7017 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7018 else
7019 xnregs = 1;
7020 if (xregno < regno + nregs && xregno + xnregs > regno)
7021 return 0;
7022 if (xregno < valueno + valuenregs
7023 && xregno + xnregs > valueno)
7024 return 0;
7025 if (goal_mem_addr_varies
7026 && reg_overlap_mentioned_for_reload_p (dest, goal))
7027 return 0;
7028 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7029 return 0;
7030 }
7031 else if (goal_mem && MEM_P (dest)
7032 && ! push_operand (dest, GET_MODE (dest)))
7033 return 0;
7034 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7035 && reg_equiv_memory_loc (regno) != 0)
7036 return 0;
7037 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7038 return 0;
7039 }
7040 else if (GET_CODE (pat) == PARALLEL)
7041 {
7042 int i;
7043 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7044 {
7045 rtx v1 = XVECEXP (pat, 0, i);
7046 if (GET_CODE (v1) == COND_EXEC)
7047 v1 = COND_EXEC_CODE (v1);
7048 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7049 {
7050 rtx dest = SET_DEST (v1);
7051 while (GET_CODE (dest) == SUBREG
7052 || GET_CODE (dest) == ZERO_EXTRACT
7053 || GET_CODE (dest) == STRICT_LOW_PART)
7054 dest = XEXP (dest, 0);
7055 if (REG_P (dest))
7056 {
7057 int xregno = REGNO (dest);
7058 int xnregs;
7059 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7060 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7061 else
7062 xnregs = 1;
7063 if (xregno < regno + nregs
7064 && xregno + xnregs > regno)
7065 return 0;
7066 if (xregno < valueno + valuenregs
7067 && xregno + xnregs > valueno)
7068 return 0;
7069 if (goal_mem_addr_varies
7070 && reg_overlap_mentioned_for_reload_p (dest,
7071 goal))
7072 return 0;
7073 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7074 return 0;
7075 }
7076 else if (goal_mem && MEM_P (dest)
7077 && ! push_operand (dest, GET_MODE (dest)))
7078 return 0;
7079 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7080 && reg_equiv_memory_loc (regno) != 0)
7081 return 0;
7082 else if (need_stable_sp
7083 && push_operand (dest, GET_MODE (dest)))
7084 return 0;
7085 }
7086 }
7087 }
7088
7089 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7090 {
7091 rtx link;
7092
7093 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7094 link = XEXP (link, 1))
7095 {
7096 pat = XEXP (link, 0);
7097 if (GET_CODE (pat) == CLOBBER)
7098 {
7099 rtx dest = SET_DEST (pat);
7100
7101 if (REG_P (dest))
7102 {
7103 int xregno = REGNO (dest);
7104 int xnregs
7105 = hard_regno_nregs[xregno][GET_MODE (dest)];
7106
7107 if (xregno < regno + nregs
7108 && xregno + xnregs > regno)
7109 return 0;
7110 else if (xregno < valueno + valuenregs
7111 && xregno + xnregs > valueno)
7112 return 0;
7113 else if (goal_mem_addr_varies
7114 && reg_overlap_mentioned_for_reload_p (dest,
7115 goal))
7116 return 0;
7117 }
7118
7119 else if (goal_mem && MEM_P (dest)
7120 && ! push_operand (dest, GET_MODE (dest)))
7121 return 0;
7122 else if (need_stable_sp
7123 && push_operand (dest, GET_MODE (dest)))
7124 return 0;
7125 }
7126 }
7127 }
7128
7129 #ifdef AUTO_INC_DEC
7130 /* If this insn auto-increments or auto-decrements
7131 either regno or valueno, return 0 now.
7132 If GOAL is a memory ref and its address is not constant,
7133 and this insn P increments a register used in GOAL, return 0. */
7134 {
7135 rtx link;
7136
7137 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7138 if (REG_NOTE_KIND (link) == REG_INC
7139 && REG_P (XEXP (link, 0)))
7140 {
7141 int incno = REGNO (XEXP (link, 0));
7142 if (incno < regno + nregs && incno >= regno)
7143 return 0;
7144 if (incno < valueno + valuenregs && incno >= valueno)
7145 return 0;
7146 if (goal_mem_addr_varies
7147 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7148 goal))
7149 return 0;
7150 }
7151 }
7152 #endif
7153 }
7154 }
7155 }
7156 \f
7157 /* Find a place where INCED appears in an increment or decrement operator
7158 within X, and return the amount INCED is incremented or decremented by.
7159 The value is always positive. */
7160
7161 static int
7162 find_inc_amount (rtx x, rtx inced)
7163 {
7164 enum rtx_code code = GET_CODE (x);
7165 const char *fmt;
7166 int i;
7167
7168 if (code == MEM)
7169 {
7170 rtx addr = XEXP (x, 0);
7171 if ((GET_CODE (addr) == PRE_DEC
7172 || GET_CODE (addr) == POST_DEC
7173 || GET_CODE (addr) == PRE_INC
7174 || GET_CODE (addr) == POST_INC)
7175 && XEXP (addr, 0) == inced)
7176 return GET_MODE_SIZE (GET_MODE (x));
7177 else if ((GET_CODE (addr) == PRE_MODIFY
7178 || GET_CODE (addr) == POST_MODIFY)
7179 && GET_CODE (XEXP (addr, 1)) == PLUS
7180 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7181 && XEXP (addr, 0) == inced
7182 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7183 {
7184 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7185 return i < 0 ? -i : i;
7186 }
7187 }
7188
7189 fmt = GET_RTX_FORMAT (code);
7190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7191 {
7192 if (fmt[i] == 'e')
7193 {
7194 int tem = find_inc_amount (XEXP (x, i), inced);
7195 if (tem != 0)
7196 return tem;
7197 }
7198 if (fmt[i] == 'E')
7199 {
7200 int j;
7201 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7202 {
7203 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7204 if (tem != 0)
7205 return tem;
7206 }
7207 }
7208 }
7209
7210 return 0;
7211 }
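
/* Examples (illustrative, not from the original sources).  For

	(mem:SI (post_inc:SI (reg:SI 4)))

   find_inc_amount returns GET_MODE_SIZE (SImode), i.e. 4 on a typical
   32-bit target, when INCED is (reg:SI 4).  For

	(mem:SI (pre_modify:SI (reg:SI 4)
			       (plus:SI (reg:SI 4) (const_int -8))))

   it returns 8, the absolute value of the constant.  If INCED is not
   incremented or decremented anywhere within X, the result is 0.  */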
7212 \f
7213 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7214 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7215
7216 #ifdef AUTO_INC_DEC
7217 static int
7218 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7219 rtx insn)
7220 {
7221 rtx link;
7222
7223 gcc_assert (insn);
7224
7225 if (! INSN_P (insn))
7226 return 0;
7227
7228 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7229 if (REG_NOTE_KIND (link) == REG_INC)
7230 {
7231 unsigned int test = REGNO (XEXP (link, 0));
7232 if (test >= regno && test < endregno)
7233 return 1;
7234 }
7235 return 0;
7236 }
7237 #else
7238
7239 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7240
7241 #endif
7242
7243 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7244 If SETS is 1, also consider SETs. If SETS is 2, also consider
7245 REG_INC notes. REGNO must refer to a hard register. */
7246
7247 int
7248 regno_clobbered_p (unsigned int regno, rtx insn, enum machine_mode mode,
7249 int sets)
7250 {
7251 unsigned int nregs, endregno;
7252
7253 /* regno must be a hard register. */
7254 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7255
7256 nregs = hard_regno_nregs[regno][mode];
7257 endregno = regno + nregs;
7258
7259 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7260 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7261 && REG_P (XEXP (PATTERN (insn), 0)))
7262 {
7263 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7264
7265 return test >= regno && test < endregno;
7266 }
7267
7268 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7269 return 1;
7270
7271 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7272 {
7273 int i = XVECLEN (PATTERN (insn), 0) - 1;
7274
7275 for (; i >= 0; i--)
7276 {
7277 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7278 if ((GET_CODE (elt) == CLOBBER
7279 || (sets == 1 && GET_CODE (elt) == SET))
7280 && REG_P (XEXP (elt, 0)))
7281 {
7282 unsigned int test = REGNO (XEXP (elt, 0));
7283
7284 if (test >= regno && test < endregno)
7285 return 1;
7286 }
7287 if (sets == 2
7288 && reg_inc_found_and_valid_p (regno, endregno, elt))
7289 return 1;
7290 }
7291 }
7292
7293 return 0;
7294 }
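
/* Example (a sketch, not from the original sources).  For an insn whose
   pattern is

	(parallel [(set (reg:SI 0) (reg:SI 1))
		   (clobber (reg:SI 2))])

   regno_clobbered_p (2, insn, SImode, 0) returns 1 because of the CLOBBER,
   and with SETS == 1 the SET element is treated the same way, so
   regno_clobbered_p (0, insn, SImode, 1) also returns 1.  The register
   numbers are hypothetical.  */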
7295
7296 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
7297 rtx
7298 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7299 {
7300 int regno;
7301
7302 if (GET_MODE (reloadreg) == mode)
7303 return reloadreg;
7304
7305 regno = REGNO (reloadreg);
7306
7307 if (WORDS_BIG_ENDIAN)
7308 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7309 - (int) hard_regno_nregs[regno][mode];
7310
7311 return gen_rtx_REG (mode, regno);
7312 }
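
/* Example (illustrative, not from the original sources).  On a 32-bit
   WORDS_BIG_ENDIAN target where (reg:DI 4) occupies hard regs 4 and 5,

	reload_adjust_reg_for_mode (gen_rtx_REG (DImode, 4), SImode)

   returns (reg:SI 5): the low word lives in the higher-numbered register,
   so the register number is advanced by hard_regno_nregs[4][DImode]
   - hard_regno_nregs[4][SImode], i.e. 2 - 1.  On a little-endian target
   the result would be (reg:SI 4).  */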
7313
7314 static const char *const reload_when_needed_name[] =
7315 {
7316 "RELOAD_FOR_INPUT",
7317 "RELOAD_FOR_OUTPUT",
7318 "RELOAD_FOR_INSN",
7319 "RELOAD_FOR_INPUT_ADDRESS",
7320 "RELOAD_FOR_INPADDR_ADDRESS",
7321 "RELOAD_FOR_OUTPUT_ADDRESS",
7322 "RELOAD_FOR_OUTADDR_ADDRESS",
7323 "RELOAD_FOR_OPERAND_ADDRESS",
7324 "RELOAD_FOR_OPADDR_ADDR",
7325 "RELOAD_OTHER",
7326 "RELOAD_FOR_OTHER_ADDRESS"
7327 };
7328
7329 /* These functions are used to print the variables set by 'find_reloads'. */
7330
7331 DEBUG_FUNCTION void
7332 debug_reload_to_stream (FILE *f)
7333 {
7334 int r;
7335 const char *prefix;
7336
7337 if (! f)
7338 f = stderr;
7339 for (r = 0; r < n_reloads; r++)
7340 {
7341 fprintf (f, "Reload %d: ", r);
7342
7343 if (rld[r].in != 0)
7344 {
7345 fprintf (f, "reload_in (%s) = ",
7346 GET_MODE_NAME (rld[r].inmode));
7347 print_inline_rtx (f, rld[r].in, 24);
7348 fprintf (f, "\n\t");
7349 }
7350
7351 if (rld[r].out != 0)
7352 {
7353 fprintf (f, "reload_out (%s) = ",
7354 GET_MODE_NAME (rld[r].outmode));
7355 print_inline_rtx (f, rld[r].out, 24);
7356 fprintf (f, "\n\t");
7357 }
7358
7359 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7360
7361 fprintf (f, "%s (opnum = %d)",
7362 reload_when_needed_name[(int) rld[r].when_needed],
7363 rld[r].opnum);
7364
7365 if (rld[r].optional)
7366 fprintf (f, ", optional");
7367
7368 if (rld[r].nongroup)
7369 fprintf (f, ", nongroup");
7370
7371 if (rld[r].inc != 0)
7372 fprintf (f, ", inc by %d", rld[r].inc);
7373
7374 if (rld[r].nocombine)
7375 fprintf (f, ", can't combine");
7376
7377 if (rld[r].secondary_p)
7378 fprintf (f, ", secondary_reload_p");
7379
7380 if (rld[r].in_reg != 0)
7381 {
7382 fprintf (f, "\n\treload_in_reg: ");
7383 print_inline_rtx (f, rld[r].in_reg, 24);
7384 }
7385
7386 if (rld[r].out_reg != 0)
7387 {
7388 fprintf (f, "\n\treload_out_reg: ");
7389 print_inline_rtx (f, rld[r].out_reg, 24);
7390 }
7391
7392 if (rld[r].reg_rtx != 0)
7393 {
7394 fprintf (f, "\n\treload_reg_rtx: ");
7395 print_inline_rtx (f, rld[r].reg_rtx, 24);
7396 }
7397
7398 prefix = "\n\t";
7399 if (rld[r].secondary_in_reload != -1)
7400 {
7401 fprintf (f, "%ssecondary_in_reload = %d",
7402 prefix, rld[r].secondary_in_reload);
7403 prefix = ", ";
7404 }
7405
7406 if (rld[r].secondary_out_reload != -1)
7407 fprintf (f, "%ssecondary_out_reload = %d\n",
7408 prefix, rld[r].secondary_out_reload);
7409
7410 prefix = "\n\t";
7411 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7412 {
7413 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7414 insn_data[rld[r].secondary_in_icode].name);
7415 prefix = ", ";
7416 }
7417
7418 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7419 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7420 insn_data[rld[r].secondary_out_icode].name);
7421
7422 fprintf (f, "\n");
7423 }
7424 }
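
/* For illustration (hypothetical, not actual compiler output, and target
   dependent), the function above prints something along these lines for a
   simple input reload:

	Reload 0: reload_in (SI) = (reg:SI 123)
		GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
		reload_in_reg: (reg:SI 123)

   Further lines appear only when the corresponding fields are set.  */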
7425
7426 DEBUG_FUNCTION void
7427 debug_reload (void)
7428 {
7429 debug_reload_to_stream (stderr);
7430 }