/* Source: gcc/reload.c from thirdparty/gcc.git
   (gitweb blob 5f560a941d3af21562840741087f00f27117316b).  */
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "tm.h"
95 #include "rtl-error.h"
96 #include "tm_p.h"
97 #include "insn-config.h"
98 #include "expr.h"
99 #include "optabs.h"
100 #include "recog.h"
101 #include "df.h"
102 #include "reload.h"
103 #include "regs.h"
104 #include "addresses.h"
105 #include "hard-reg-set.h"
106 #include "flags.h"
107 #include "hashtab.h"
108 #include "hash-set.h"
109 #include "vec.h"
110 #include "machmode.h"
111 #include "input.h"
112 #include "function.h"
113 #include "params.h"
114 #include "target.h"
115 #include "ira.h"
116
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  Excludes
   HIGH parts (which are only half of an address) and anything the
   target declares it cannot place in memory.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))
124
125 /* True if C is a non-empty register class that has too few registers
126 to be safely used as a reload target class. */
127
128 static inline bool
129 small_register_class_p (reg_class_t rclass)
130 {
131 return (reg_class_size [(int) rclass] == 1
132 || (reg_class_size [(int) rclass] >= 1
133 && targetm.class_likely_spilled_p (rclass)));
134 }
135
136 \f
137 /* All reloads of the current insn are recorded here. See reload.h for
138 comments. */
139 int n_reloads;
140 struct reload rld[MAX_RELOADS];
141
142 /* All the "earlyclobber" operands of the current insn
143 are recorded here. */
144 int n_earlyclobbers;
145 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
146
147 int reload_n_operands;
148
149 /* Replacing reloads.
150
151 If `replace_reloads' is nonzero, then as each reload is recorded
152 an entry is made for it in the table `replacements'.
153 Then later `subst_reloads' can look through that table and
154 perform all the replacements needed. */
155
156 /* Nonzero means record the places to replace. */
157 static int replace_reloads;
158
/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in.  */
  int what;			/* Which reload this is for.  */
  enum machine_mode mode;	/* Mode it must have.  */
};

/* Worst case: one slot per operand plus one per possible address
   register on each side of each operand.  */
static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;

/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
181
182 #ifdef SECONDARY_MEMORY_NEEDED
183
184 /* Save MEMs needed to copy from one class of registers to another. One MEM
185 is used per mode, but normally only one or two modes are ever used.
186
187 We keep two versions, before and after register elimination. The one
188 after register elimination is record separately for each operand. This
189 is done in case the address is not valid to be sure that we separately
190 reload each. */
191
192 static rtx secondary_memlocs[NUM_MACHINE_MODES];
193 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
194 static int secondary_memlocs_elim_used = 0;
195 #endif
196
197 /* The instruction we are doing reloads for;
198 so we can test whether a register dies in it. */
199 static rtx_insn *this_insn;
200
201 /* Nonzero if this instruction is a user-specified asm with operands. */
202 static int this_insn_is_asm;
203
204 /* If hard_regs_live_known is nonzero,
205 we can tell which hard regs are currently live,
206 at least enough to succeed in choosing dummy reloads. */
207 static int hard_regs_live_known;
208
209 /* Indexed by hard reg number,
210 element is nonnegative if hard reg has been spilled.
211 This vector is passed to `find_reloads' as an argument
212 and is not changed here. */
213 static short *static_reload_reg_p;
214
215 /* Set to 1 in subst_reg_equivs if it changes anything. */
216 static int subst_reg_equivs_changed;
217
218 /* On return from push_reload, holds the reload-number for the OUT
219 operand, which can be different for that from the input operand. */
220 static int output_reloadnum;
221
/* Compare two RTX's.  Registers match on register number alone; other
   rtx match structurally via rtx_equal_p, but only when X is free of
   side effects.  Two null pointers also match (the x == y case).  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))

/* Indicates if two reloads purposes are for similar enough things that we
   can merge their reloads.  RELOAD_OTHER merges with anything; otherwise
   the reload types must either be identical for the same operand, or be
   one of the types that share a reload across operands.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged:
   mergable reloads whose types differ, or whose operands differ for a
   type that is not shared across operands, must be widened.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))

/* If we are going to reload an address, compute the reload type to
   use: input/output address reloads get the corresponding
   address-of-address type; everything else keeps its type.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
255
256 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
257 enum machine_mode, enum reload_type,
258 enum insn_code *, secondary_reload_info *);
259 static enum reg_class find_valid_class (enum machine_mode, enum machine_mode,
260 int, unsigned int);
261 static void push_replacement (rtx *, int, enum machine_mode);
262 static void dup_replacements (rtx *, rtx *);
263 static void combine_reloads (void);
264 static int find_reusable_reload (rtx *, rtx, enum reg_class,
265 enum reload_type, int, int);
266 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, enum machine_mode,
267 enum machine_mode, reg_class_t, int, int);
268 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
269 static struct decomposition decompose (rtx);
270 static int immune_p (rtx, rtx, struct decomposition);
271 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
272 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
273 rtx_insn *, int *);
274 static rtx make_memloc (rtx, int);
275 static int maybe_memory_address_addr_space_p (enum machine_mode, rtx,
276 addr_space_t, rtx *);
277 static int find_reloads_address (enum machine_mode, rtx *, rtx, rtx *,
278 int, enum reload_type, int, rtx_insn *);
279 static rtx subst_reg_equivs (rtx, rtx_insn *);
280 static rtx subst_indexed_address (rtx);
281 static void update_auto_inc_notes (rtx_insn *, int, int);
282 static int find_reloads_address_1 (enum machine_mode, addr_space_t, rtx, int,
283 enum rtx_code, enum rtx_code, rtx *,
284 int, enum reload_type,int, rtx_insn *);
285 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
286 enum machine_mode, int,
287 enum reload_type, int);
288 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
289 int, rtx_insn *, int *);
290 static void copy_replacements_1 (rtx *, rtx *, int);
291 static int find_inc_amount (rtx, rtx);
292 static int refers_to_mem_for_reload_p (rtx);
293 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
294 rtx, rtx *);
295
296 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
297 list yet. */
298
299 static void
300 push_reg_equiv_alt_mem (int regno, rtx mem)
301 {
302 rtx it;
303
304 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
305 if (rtx_equal_p (XEXP (it, 0), mem))
306 return;
307
308 reg_equiv_alt_mem_list (regno)
309 = alloc_EXPR_LIST (REG_EQUIV, mem,
310 reg_equiv_alt_mem_list (regno));
311 }
312 \f
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   OPNUM and TYPE identify the operand and reload purpose the secondary
   reload serves; OPTIONAL is nonzero if the enclosing reload is optional.
   PREV_SRI chains target-specific secondary-reload info through recursive
   (tertiary) calls.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       enum machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  enum machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  /* Address-of-address reload types keep their type; any other type
     becomes the plain input or output address type.  */
  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  /* Ask the target whether this copy needs an intermediate register
     class (returned value) and/or a special reload pattern (sri.icode).  */
  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  /* An intermediate register is needed: recursively push whatever
     tertiary reload that intermediate register itself requires.  */
  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      /* Derive the scratch register's class from operand 2's constraint,
	 skipping the leading "=" (and "&" if earlyclobber).  */
      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      scratch_class = (reg_class_for_constraint
		       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     can not use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload: same direction,
     compatible class, same mode, matching tertiary reload/icode, and a
     mergable reload purpose.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	/* Narrow the merged reload's class when ours is the subset.  */
	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  /* No existing reload was merged: create a fresh one.  */
  if (s_reload == n_reloads)
    {
#ifdef SECONDARY_MEMORY_NEEDED
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}
#endif

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      /* Output direction: the secondary memory location is set up after
	 the reload has been created (see comment above).  */
      if (! in_p && icode == CODE_FOR_nothing
	  && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
	get_secondary_mem (x, mode, opnum, type);
#endif
    }

  *picode = icode;
  return s_reload;
}
517
518 /* If a secondary reload is needed, return its class. If both an intermediate
519 register and a scratch register is needed, we return the class of the
520 intermediate register. */
521 reg_class_t
522 secondary_reload_class (bool in_p, reg_class_t rclass, enum machine_mode mode,
523 rtx x)
524 {
525 enum insn_code icode;
526 secondary_reload_info sri;
527
528 sri.icode = CODE_FOR_nothing;
529 sri.prev_sri = NULL;
530 rclass
531 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
532 icode = (enum insn_code) sri.icode;
533
534 /* If there are no secondary reloads at all, we return NO_REGS.
535 If an intermediate register is needed, we return its class. */
536 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
537 return rclass;
538
539 /* No intermediate register is needed, but we have a special reload
540 pattern, which we assume for now needs a scratch register. */
541 return scratch_reload_class (icode);
542 }
543
544 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
545 three operands, verify that operand 2 is an output operand, and return
546 its register class.
547 ??? We'd like to be able to handle any pattern with at least 2 operands,
548 for zero or more scratch registers, but that needs more infrastructure. */
549 enum reg_class
550 scratch_reload_class (enum insn_code icode)
551 {
552 const char *scratch_constraint;
553 enum reg_class rclass;
554
555 gcc_assert (insn_data[(int) icode].n_operands == 3);
556 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
557 gcc_assert (*scratch_constraint == '=');
558 scratch_constraint++;
559 if (*scratch_constraint == '&')
560 scratch_constraint++;
561 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
562 gcc_assert (rclass != NO_REGS);
563 return rclass;
564 }
565 \f
566 #ifdef SECONDARY_MEMORY_NEEDED
567
/* Return a memory location that will be used to copy X in mode MODE.
   OPNUM and TYPE identify the operand/purpose, used to key the
   per-operand post-elimination cache.  If we haven't already made a
   location for this mode in this insn, call find_reloads_address on
   the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, enum machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

#ifdef SECONDARY_MEMORY_NEEDED_MODE
  mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
#else
  if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
    mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
#endif

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  /* Don't reload the address into the cached pre-elimination MEM;
     copy it first so the cache stays un-reloaded.  */
  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  /* Cache the post-elimination MEM and track the high-water mark of
     modes used, so the cache can be cleared cheaply.  */
  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int)mode)
    secondary_memlocs_elim_used = (int)mode + 1;
  return loc;
}
641
642 /* Clear any secondary memory locations we've made. */
643
644 void
645 clear_secondary_mem (void)
646 {
647 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
648 }
649 #endif /* SECONDARY_MEMORY_NEEDED */
650 \f
651
652 /* Find the largest class which has at least one register valid in
653 mode INNER, and which for every such register, that register number
654 plus N is also valid in OUTER (if in range) and is cheap to move
655 into REGNO. Such a class must exist. */
656
657 static enum reg_class
658 find_valid_class (enum machine_mode outer ATTRIBUTE_UNUSED,
659 enum machine_mode inner ATTRIBUTE_UNUSED, int n,
660 unsigned int dest_regno ATTRIBUTE_UNUSED)
661 {
662 int best_cost = -1;
663 int rclass;
664 int regno;
665 enum reg_class best_class = NO_REGS;
666 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
667 unsigned int best_size = 0;
668 int cost;
669
670 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
671 {
672 int bad = 0;
673 int good = 0;
674 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
675 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
676 {
677 if (HARD_REGNO_MODE_OK (regno, inner))
678 {
679 good = 1;
680 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
681 && ! HARD_REGNO_MODE_OK (regno + n, outer))
682 bad = 1;
683 }
684 }
685
686 if (bad || !good)
687 continue;
688 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
689
690 if ((reg_class_size[rclass] > best_size
691 && (best_cost < 0 || best_cost >= cost))
692 || best_cost > cost)
693 {
694 best_class = (enum reg_class) rclass;
695 best_size = reg_class_size[rclass];
696 best_cost = register_move_cost (outer, (enum reg_class) rclass,
697 dest_class);
698 }
699 }
700
701 gcc_assert (best_size != 0);
702
703 return best_class;
704 }
705
706 /* We are trying to reload a subreg of something that is not a register.
707 Find the largest class which contains only registers valid in
708 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
709 which we would eventually like to obtain the object. */
710
711 static enum reg_class
712 find_valid_class_1 (enum machine_mode outer ATTRIBUTE_UNUSED,
713 enum machine_mode mode ATTRIBUTE_UNUSED,
714 enum reg_class dest_class ATTRIBUTE_UNUSED)
715 {
716 int best_cost = -1;
717 int rclass;
718 int regno;
719 enum reg_class best_class = NO_REGS;
720 unsigned int best_size = 0;
721 int cost;
722
723 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
724 {
725 int bad = 0;
726 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
727 {
728 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
729 && !HARD_REGNO_MODE_OK (regno, mode))
730 bad = 1;
731 }
732
733 if (bad)
734 continue;
735
736 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
737
738 if ((reg_class_size[rclass] > best_size
739 && (best_cost < 0 || best_cost >= cost))
740 || best_cost > cost)
741 {
742 best_class = (enum reg_class) rclass;
743 best_size = reg_class_size[rclass];
744 best_cost = register_move_cost (outer, (enum reg_class) rclass,
745 dest_class);
746 }
747 }
748
749 gcc_assert (best_size != 0);
750
751 #ifdef LIMIT_RELOAD_CLASS
752 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
753 #endif
754 return best_class;
755 }
756 \f
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	/* Either IN matches with OUT neutral, or OUT matches with IN
	   neutral (see MATCHES for the matching rules).  */
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	/* Input-only reloads where one side is the plain register that
	   the other side auto-increments.  */
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
837
838 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
839 expression. MODE is the mode that X will be used in. OUTPUT is true if
840 the function is invoked for the output part of an enclosing reload. */
841
842 static bool
843 reload_inner_reg_of_subreg (rtx x, enum machine_mode mode, bool output)
844 {
845 rtx inner;
846
847 /* Only SUBREGs are problematical. */
848 if (GET_CODE (x) != SUBREG)
849 return false;
850
851 inner = SUBREG_REG (x);
852
853 /* If INNER is a constant or PLUS, then INNER will need reloading. */
854 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
855 return true;
856
857 /* If INNER is not a hard register, then INNER will not need reloading. */
858 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
859 return false;
860
861 /* If INNER is not ok for MODE, then INNER will need reloading. */
862 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
863 return true;
864
865 /* If this is for an output, and the outer part is a word or smaller,
866 INNER is larger than a word and the number of registers in INNER is
867 not the same as the number of words in INNER, then INNER will need
868 reloading (with an in-out reload). */
869 return (output
870 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
871 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
872 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
873 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
874 }
875
876 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
877 requiring an extra reload register. The caller has already found that
878 IN contains some reference to REGNO, so check that we can produce the
879 new value in a single step. E.g. if we have
880 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
881 instruction that adds one to a register, this should succeed.
882 However, if we have something like
883 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
884 needs to be loaded into a register first, we need a separate reload
885 register.
886 Such PLUS reloads are generated by find_reload_address_part.
887 The out-of-range PLUS expressions are usually introduced in the instruction
888 patterns by register elimination and substituting pseudos without a home
889 by their function-invariant equivalences. */
890 static int
891 can_reload_into (rtx in, int regno, enum machine_mode mode)
892 {
893 rtx dst;
894 rtx_insn *test_insn;
895 int r = 0;
896 struct recog_data_d save_recog_data;
897
898 /* For matching constraints, we often get notional input reloads where
899 we want to use the original register as the reload register. I.e.
900 technically this is a non-optional input-output reload, but IN is
901 already a valid register, and has been chosen as the reload register.
902 Speed this up, since it trivially works. */
903 if (REG_P (in))
904 return 1;
905
906 /* To test MEMs properly, we'd have to take into account all the reloads
907 that are already scheduled, which can become quite complicated.
908 And since we've already handled address reloads for this MEM, it
909 should always succeed anyway. */
910 if (MEM_P (in))
911 return 1;
912
913 /* If we can make a simple SET insn that does the job, everything should
914 be fine. */
915 dst = gen_rtx_REG (mode, regno);
916 test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in));
917 save_recog_data = recog_data;
918 if (recog_memoized (test_insn) >= 0)
919 {
920 extract_insn (test_insn);
921 r = constrain_operands (1, get_enabled_alternatives (test_insn));
922 }
923 recog_data = save_recog_data;
924 return r;
925 }
926
927 /* Record one reload that needs to be performed.
928 IN is an rtx saying where the data are to be found before this instruction.
929 OUT says where they must be stored after the instruction.
930 (IN is zero for data not read, and OUT is zero for data not written.)
931 INLOC and OUTLOC point to the places in the instructions where
932 IN and OUT were found.
933 If IN and OUT are both nonzero, it means the same register must be used
934 to reload both IN and OUT.
935
936 RCLASS is a register class required for the reloaded data.
937 INMODE is the machine mode that the instruction requires
938 for the reg that replaces IN and OUTMODE is likewise for OUT.
939
940 If IN is zero, then OUT's location and mode should be passed as
941 INLOC and INMODE.
942
943 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
944
945 OPTIONAL nonzero means this reload does not need to be performed:
946 it can be discarded if that is more convenient.
947
948 OPNUM and TYPE say what the purpose of this reload is.
949
950 The return value is the reload-number for this reload.
951
952 If both IN and OUT are nonzero, in some rare cases we might
953 want to make two separate reloads. (Actually we never do this now.)
954 Therefore, the reload-number for OUT is stored in
955 output_reloadnum when we return; the return value applies to IN.
956 Usually (presently always), when IN and OUT are nonzero,
957 the two reload-numbers are equal, but the caller should be careful to
958 distinguish them. */
959
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, enum machine_mode inmode,
	     enum machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  /* Set if IN appears in OUT's address, so an input-only reload of IN
     must not be shared with another reload.  */
  int dont_share = 0;
  /* Set when the inner register of a SUBREG got its own reload pushed,
     so the SUBREG around IN/OUT must be preserved below.  */
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  /* Register class used to reload the inner register of an input SUBREG,
     if one is needed; NO_REGS otherwise.  */
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends until now missed to replace a pseudo
     with a constant of reg_equiv_constant something went wrong
     beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }

  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && ((GET_MODE_PRECISION (inmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
#ifdef LOAD_EXTEND_OP
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			  <= UNITS_PER_WORD)
		      && (GET_MODE_PRECISION (inmode)
			  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
		      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
#endif
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (inmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
						[GET_MODE (SUBREG_REG (in))]))
		  || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P
	      (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      if (MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
#endif
      inmode = GET_MODE (in);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }

  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
#ifdef CANNOT_CHANGE_MODE_CLASS
      && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
#endif
      && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && ((GET_MODE_PRECISION (outmode)
		   > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
#ifdef WORD_REGISTER_OPERATIONS
		  || ((GET_MODE_PRECISION (outmode)
		       < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))
#endif
		  ))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
#ifdef CANNOT_CHANGE_MODE_CLASS
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   outmode))
#endif
	  ))
    {
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
#if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
      gcc_assert (!MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
#endif
      outmode = GET_MODE (out);
    }

  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }

  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif

  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      enum machine_mode mode;
      /* Use the wider of the two modes for the check below.  */
      if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      /* Look for at least one hard register in RCLASS that can hold MODE.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (HARD_REGNO_MODE_OK (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */

	  return 0;
	}
    }

  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  /* See whether an already-recorded reload can be merged with this one.  */
  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

#ifdef SECONDARY_MEMORY_NEEDED
      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
	get_secondary_mem (in, inmode, opnum, type);
#endif

      /* Fill in the new reload's descriptor in the rld[] table.  */
      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

#ifdef SECONDARY_MEMORY_NEEDED
      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && SECONDARY_MEMORY_NEEDED (rclass,
				      REGNO_REG_CLASS (reg_or_subregno (out)),
				      outmode))
	get_secondary_mem (out, outmode, opnum, type);
#endif
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || GET_MODE_SIZE (GET_MODE (in))
		 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && GET_MODE_SIZE (GET_MODE (in_reg))
		     > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && GET_MODE_SIZE (GET_MODE (out))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || GET_MODE_SIZE (GET_MODE (*outloc))
		     > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
	    rld[i].out_reg = *outloc;
	}
      /* Narrow the class to the stricter of the two requests.  */
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }

  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
      gcc_assert (rld[i].inc != 0);
    }
#endif

  /* If we will replace IN and OUT with the reload-reg,
     record where they are located so that substitution need
     not do a tree walk.  */

  if (replace_reloads)
    {
      if (inloc != 0)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = inloc;
	  r->mode = inmode;
	}
      if (outloc != 0 && outloc != inloc)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = outloc;
	  r->mode = outmode;
	}
    }

  /* If this reload is just being introduced and it has both
     an incoming quantity and an outgoing quantity that are
     supposed to be made to match, see if either one of the two
     can serve as the place to reload into.

     If one of them is acceptable, set rld[i].reg_rtx
     to that one.  */

  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
    {
      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
					  inmode, outmode,
					  rld[i].rclass, i,
					  earlyclobber_operand_p (out));

      /* If the outgoing register already contains the same value
	 as the incoming one, we can dispense with loading it.
	 The easiest way to tell the caller that is to give a phony
	 value for the incoming operand (same as outgoing one).  */
      if (rld[i].reg_rtx == out
	  && (REG_P (in) || CONSTANT_P (in))
	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
				  static_reload_reg_p, i, inmode))
	rld[i].in = out;
    }

  /* If this is an input reload and the operand contains a register that
     dies in this insn and is used nowhere else, see if it is the right class
     to be used for this reload.  Use it if so.  (This occurs most commonly
     in the case of paradoxical SUBREGs and in-out reloads).  We cannot do
     this if it is also an output reload that mentions the register unless
     the output is a SUBREG that clobbers an entire register.

     Note that the operand might be one of the spill regs, if it is a
     pseudo reg and we are in a block where spilling has not taken place.
     But if there is no spilling in this block, that is OK.
     An explicitly used hard reg cannot be a spill reg.  */

  if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
    {
      rtx note;
      int regno;
      enum machine_mode rel_mode = inmode;

      if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
	rel_mode = outmode;

      for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD
	    && REG_P (XEXP (note, 0))
	    && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	    && reg_mentioned_p (XEXP (note, 0), in)
	    /* Check that a former pseudo is valid; see find_dummy_reload.  */
	    && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
		|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
				    ORIGINAL_REGNO (XEXP (note, 0)))
		    && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
	    && ! refers_to_regno_for_reload_p (regno,
					       end_hard_regno (rel_mode,
							       regno),
					       PATTERN (this_insn), inloc)
	    /* If this is also an output reload, IN cannot be used as
	       the reload register if it is set in this insn unless IN
	       is also OUT.  */
	    && (out == 0 || in == out
		|| ! hard_reg_set_here_p (regno,
					  end_hard_regno (rel_mode, regno),
					  PATTERN (this_insn)))
	    /* ??? Why is this code so different from the previous?
	       Is there any simple coherent way to describe the two together?
	       What's going on here.  */
	    && (in != out
		|| (GET_CODE (in) == SUBREG
		    && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
			 / UNITS_PER_WORD)
			== ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	    /* Make sure the operand fits in the reg that dies.  */
	    && (GET_MODE_SIZE (rel_mode)
		<= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
	    && HARD_REGNO_MODE_OK (regno, inmode)
	    && HARD_REGNO_MODE_OK (regno, outmode))
	  {
	    unsigned int offs;
	    unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
				      hard_regno_nregs[regno][outmode]);

	    /* Reject the dying register if any of the hard registers it
	       spans is fixed or outside RCLASS.  */
	    for (offs = 0; offs < nregs; offs++)
	      if (fixed_regs[regno + offs]
		  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
					  regno + offs))
		break;

	    if (offs == nregs
		&& (! (refers_to_regno_for_reload_p
		       (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
		    || can_reload_into (in, regno, inmode)))
	      {
		rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
		break;
	      }
	  }
    }

  /* Report the reload number handling OUT (see the function comment);
     presently always equal to the returned reload number for IN.  */
  if (out)
    output_reloadnum = i;

  return i;
}
1671
1672 /* Record an additional place we must replace a value
1673 for which we have already recorded a reload.
1674 RELOADNUM is the value returned by push_reload
1675 when the reload was recorded.
1676 This is used in insn patterns that use match_dup. */
1677
1678 static void
1679 push_replacement (rtx *loc, int reloadnum, enum machine_mode mode)
1680 {
1681 if (replace_reloads)
1682 {
1683 struct replacement *r = &replacements[n_replacements++];
1684 r->what = reloadnum;
1685 r->where = loc;
1686 r->mode = mode;
1687 }
1688 }
1689
1690 /* Duplicate any replacement we have recorded to apply at
1691 location ORIG_LOC to also be performed at DUP_LOC.
1692 This is used in insn patterns that use match_dup. */
1693
1694 static void
1695 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1696 {
1697 int i, n = n_replacements;
1698
1699 for (i = 0; i < n; i++)
1700 {
1701 struct replacement *r = &replacements[i];
1702 if (r->where == orig_loc)
1703 push_replacement (dup_loc, r->what, r->mode);
1704 }
1705 }
1706 \f
1707 /* Transfer all replacements that used to be in reload FROM to be in
1708 reload TO. */
1709
1710 void
1711 transfer_replacements (int to, int from)
1712 {
1713 int i;
1714
1715 for (i = 0; i < n_replacements; i++)
1716 if (replacements[i].what == from)
1717 replacements[i].what = to;
1718 }
1719 \f
/* IN_RTX is the value loaded by a reload that we now decided to inherit,
   or a subpart of it.  If we have any replacements registered for IN_RTX,
   cancel the reloads that were supposed to load them.
   Return nonzero if we canceled any reloads.  */
int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  /* For each reload, bit 0 of reload_flags records "some replacement of
     this reload lies inside IN_RTX" and bit 1 records "some replacement
     lies elsewhere".  Only reloads whose replacements all lie inside
     IN_RTX (flags == 1) may be canceled.  */
  memset (reload_flags, 0, sizeof reload_flags);
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  /* Keep this replacement, compacting the array in place.  */
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
	{
	  deallocate_reload_reg (i);
	  /* The canceled reload's input may itself be fed by address
	     replacements; cancel those recursively as well.  */
	  remove_address_replacements (rld[i].in);
	  rld[i].in = 0;
	  something_changed = 1;
	}
    }
  return something_changed;
}
1757 \f
/* If there is only one output reload, and it is not for an earlyclobber
   operand, try to combine it with a (logically unrelated) input reload
   to reduce the number of reload registers needed.

   This is safe if the input reload does not appear in
   the value being output-reloaded, because this implies
   it is not needed any more once the original insn completes.

   If that doesn't work, see if we can use any of the registers that
   die in this insn as a reload register.  We can if it is of the right
   class and does not appear in the value being output-reloaded.  */

static void
combine_reloads (void)
{
  int i, regno;
  int output_reload = -1;
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;
	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
	/* Life span of this reload must not extend past main insn.  */
	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
	&& rld[i].when_needed != RELOAD_OTHER
	/* The input and output must occupy the same number of registers
	   in their respective classes, or sharing one register cannot
	   possibly work.  */
	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
				       [(int) rld[output_reload].outmode])
	&& rld[i].inc == 0
	&& rld[i].reg_rtx == 0
#ifdef SECONDARY_MEMORY_NEEDED
	/* Don't combine two reloads with different secondary
	   memory locations.  */
	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
#endif
	/* On targets with few registers, require identical classes;
	   otherwise one class being a subset of the other suffices.  */
	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
	    ? (rld[i].rclass == rld[output_reload].rclass)
	    : (reg_class_subset_p (rld[i].rclass,
				   rld[output_reload].rclass)
	       || reg_class_subset_p (rld[output_reload].rclass,
				      rld[i].rclass)))
	&& (MATCHES (rld[i].in, rld[output_reload].out)
	    /* Args reversed because the first arg seems to be
	       the one that we imagine being modified
	       while the second is the one that might be affected.  */
	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
						      rld[i].in)
		/* However, if the input is a register that appears inside
		   the output, then we also can't share.
		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
		   If the same reload reg is used for both reg 69 and the
		   result to be stored in memory, then that result
		   will clobber the address of the memory ref.  */
		&& ! (REG_P (rld[i].in)
		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
							     rld[output_reload].out))))
	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
					 rld[i].when_needed != RELOAD_FOR_INPUT)
	&& (reg_class_size[(int) rld[i].rclass]
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	/* We will allow making things slightly worse by combining an
	   input and an output, but no worse than that.  */
	&& (rld[i].when_needed == RELOAD_FOR_INPUT
	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
	int j;

	/* We have found a reload to combine with!  */
	rld[i].out = rld[output_reload].out;
	rld[i].out_reg = rld[output_reload].out_reg;
	rld[i].outmode = rld[output_reload].outmode;
	/* Mark the old output reload as inoperative.  */
	rld[output_reload].out = 0;
	/* The combined reload is needed for the entire insn.  */
	rld[i].when_needed = RELOAD_OTHER;
	/* If the output reload had a secondary reload, copy it.  */
	if (rld[output_reload].secondary_out_reload != -1)
	  {
	    rld[i].secondary_out_reload
	      = rld[output_reload].secondary_out_reload;
	    rld[i].secondary_out_icode
	      = rld[output_reload].secondary_out_icode;
	  }

#ifdef SECONDARY_MEMORY_NEEDED
	/* Copy any secondary MEM.  */
	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
#endif
	/* If required, minimize the register class.  */
	if (reg_class_subset_p (rld[output_reload].rclass,
				rld[i].rclass))
	  rld[i].rclass = rld[output_reload].rclass;

	/* Transfer all replacements from the old reload to the combined.  */
	for (j = 0; j < n_replacements; j++)
	  if (replacements[j].what == output_reload)
	    replacements[j].what = i;

	return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber).  If this is an asm insn, give up.  */

  if (INSN_CODE (this_insn) == -1)
    return;

  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;

  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	/* The dying register must be at least as wide as the output.  */
	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
}
1949 \f
/* Try to find a reload register for an in-out reload (expressions IN and OUT).
   See if one of IN and OUT is a register that may be used;
   this is desirable since a spill-register won't be needed.
   If so, return the register rtx that proves acceptable.

   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
   RCLASS is the register class required for the reload.

   If FOR_REAL is >= 0, it is the number of the reload,
   and in some cases when it can be discovered that OUT doesn't need
   to be computed, clear out rld[FOR_REAL].out.

   If FOR_REAL is -1, this should not be done, because this call
   is just to see if a register can be found, not to find and install it.

   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
   puts an additional constraint on being able to use IN for OUT since
   IN must not appear elsewhere in the insn (it is assumed that IN itself
   is safe from the earlyclobber).  */

static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   enum machine_mode inmode, enum machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
{
  rtx in = real_in;
  rtx out = real_out;
  int in_offset = 0;
  int out_offset = 0;
  rtx value = 0;

  /* If operands exceed a word, we can't use either of them
     unless they have the same size.  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   SUBREG_BYTE (out),
					   GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_BYTE (in),
					  GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs[regno][outmode];
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
	 ignore references within IN.  They don't prevent us
	 from copying IN into OUT, because those refs would
	 move into the insn that reloads IN.

	 However, we only ignore IN in its role as this reload.
	 If the insn uses IN elsewhere and it contains OUT,
	 that counts.  We can't be sure it's the "same" operand
	 so it might not go through this reload.

	 We also need to avoid using OUT if it, or part of it, is a
	 fixed register.  Modifying such registers, even transiently,
	 may have undefined effects on the machine, such as modifying
	 the stack pointer.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_MODE_OK (regno, outmode)
	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
					     PATTERN (this_insn), outloc))
	{
	  unsigned int i;

	  /* Every constituent hard register must belong to RCLASS
	     and must not be fixed.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i)
		|| fixed_regs[regno + i])
	      break;

	  if (i == nwords)
	    {
	      if (REG_P (real_out))
		value = real_out;
	      else
		value = gen_rtx_REG (outmode, regno);
	    }
	}

      /* Restore the operand we temporarily blanked out above.  */
      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
  if (hard_regs_live_known
      && REG_P (in)
      && REGNO (in) < FIRST_PSEUDO_REGISTER
      && (value == 0
	  || find_reg_note (this_insn, REG_UNUSED, real_out))
      && find_reg_note (this_insn, REG_DEAD, real_in)
      && !fixed_regs[REGNO (in)]
      && HARD_REGNO_MODE_OK (REGNO (in),
			     /* The only case where out and real_out might
				have different modes is where real_out
				is a subreg, and in that case, out
				has a real mode.  */
			     (GET_MODE (out) != VOIDmode
			      ? GET_MODE (out) : outmode))
      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
	  /* However only do this if we can be sure that this input
	     operand doesn't correspond with an uninitialized pseudo.
	     global can assign some hardreg to it that is the same as
	     the one assigned to a different, also live pseudo (as it
	     can ignore the conflict).  We must never introduce writes
	     to such hardregs, as they would clobber the other live
	     pseudo.  See PR 20973.  */
	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			     ORIGINAL_REGNO (in))
	      /* Similarly, only do this if we can be sure that the death
		 note is still valid.  global can assign some hardreg to
		 the pseudo referenced in the note and simultaneously a
		 subword of this hardreg to a different, also live pseudo,
		 because only another subword of the hardreg is actually
		 used in the insn.  This cannot happen if the pseudo has
		 been assigned exactly one hardreg.  See PR 33732.  */
	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
    {
      unsigned int regno = REGNO (in) + in_offset;
      unsigned int nwords = hard_regno_nregs[regno][inmode];

      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
	  && ! hard_reg_set_here_p (regno, regno + nwords,
				    PATTERN (this_insn))
	  && (! earlyclobber
	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
						 PATTERN (this_insn), inloc)))
	{
	  unsigned int i;

	  /* Every constituent hard register must belong to RCLASS.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      /* If we were going to use OUT as the reload reg
		 and changed our mind, it means OUT is a dummy that
		 dies here.  So don't bother copying value to it.  */
	      if (for_real >= 0 && value == real_out)
		rld[for_real].out = 0;
	      if (REG_P (real_in))
		value = real_in;
	      else
		value = gen_rtx_REG (inmode, regno);
	    }
	}
    }

  return value;
}
2144 \f
2145 /* This page contains subroutines used mainly for determining
2146 whether the IN or an OUT of a reload can serve as the
2147 reload register. */
2148
2149 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2150
2151 int
2152 earlyclobber_operand_p (rtx x)
2153 {
2154 int i;
2155
2156 for (i = 0; i < n_earlyclobbers; i++)
2157 if (reload_earlyclobbers[i] == x)
2158 return 1;
2159
2160 return 0;
2161 }
2162
2163 /* Return 1 if expression X alters a hard reg in the range
2164 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2165 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2166 X should be the body of an instruction. */
2167
2168 static int
2169 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2170 {
2171 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2172 {
2173 rtx op0 = SET_DEST (x);
2174
2175 while (GET_CODE (op0) == SUBREG)
2176 op0 = SUBREG_REG (op0);
2177 if (REG_P (op0))
2178 {
2179 unsigned int r = REGNO (op0);
2180
2181 /* See if this reg overlaps range under consideration. */
2182 if (r < end_regno
2183 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2184 return 1;
2185 }
2186 }
2187 else if (GET_CODE (x) == PARALLEL)
2188 {
2189 int i = XVECLEN (x, 0) - 1;
2190
2191 for (; i >= 0; i--)
2192 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2193 return 1;
2194 }
2195
2196 return 0;
2197 }
2198
/* Return 1 if ADDR is a valid memory address for mode MODE
   in address space AS, and check that each pseudo reg has the
   proper kind of hard reg.  */

int
strict_memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
				    rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Targets still defining the old-style macro only support the
     generic address space; the macro jumps to WIN on success.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Ask the target hook, in strict mode (third argument nonzero:
     pseudos without hard regs are not acceptable as base/index).  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
#endif
}
2218 \f
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   autoincrement and autodecrement.
   This is specifically intended for find_reloads to use
   in determining whether two operands match.
   X is the operand whose number is the lower of the two.

   The value is 2 if Y contains a pre-increment that matches
   a non-incrementing address in X.  */

/* ??? To be completely correct, we should arrange to pass
   for X the output operand and for Y the input operand.
   For now, we assume that the output operand has the lower number
   because that is natural in (SET output (... input ...)).  */

int
operands_match_p (rtx x, rtx y)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int success_2;

  if (x == y)
    return 1;
  /* Fast path: both operands are (possibly SUBREGs of) registers.
     Resolve each to a register number and compare.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int j;

      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  /* Pseudo SUBREGs can't be resolved to a hard regno here;
	     fall back to structural comparison.  */
	  if (i >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x),
				    GET_MODE (x));
	}
      else
	i = REGNO (x);

      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (j >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
				    GET_MODE (SUBREG_REG (y)),
				    SUBREG_BYTE (y),
				    GET_MODE (y));
	}
      else
	j = REGNO (y);

      /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
	 multiple hard register group of scalar integer registers, so that
	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
	 register.  */
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && i < FIRST_PSEUDO_REGISTER)
	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (y))
	  && j < FIRST_PSEUDO_REGISTER)
	j += hard_regno_nregs[j][GET_MODE (y)] - 1;

      return i == j;
    }
  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two postincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a postincrement matches ordinary indexing
     if the postincrement is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y);
  /* Two preincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a preincrement matches ordinary indexing
     if the preincrement is the input operand.
     In this case, return 2, since some callers need to do special
     things when this happens.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;

 slow:

  /* Now we have disposed of all the cases in which different rtx codes
     can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants were already handled by the X == Y test.  */
      return 0;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
    case SYMBOL_REF:
      /* Symbol names are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  success_2 = 0;
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
	  if (val == 0)
	    return 0;
	  /* If any subexpression returns 2,
	     we should return 2 if we are successful.  */
	  if (val == 2)
	    success_2 = 1;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
	      if (val == 0)
		return 0;
	      if (val == 2)
		success_2 = 1;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  /* Returns 1 on a plain match, 2 if a PRE_INC/PRE_DEC matched.  */
  return 1 + success_2;
}
2392 \f
/* Describe the range of registers or memory referenced by X.
   If X is a register, set REG_FLAG and put the first register
   number into START and the last plus one into END.
   If X is a memory reference, put a base address into BASE
   and a range of integer offsets into START and END.
   If X is pushing on the stack, we can assume it causes no trouble,
   so we set the SAFE field.  */

static struct decomposition
decompose (rtx x)
{
  struct decomposition val;
  int all_const = 0;

  memset (&val, 0, sizeof (val));

  switch (GET_CODE (x))
    {
    case MEM:
      {
	rtx base = NULL_RTX, offset = 0;
	rtx addr = XEXP (x, 0);

	/* Simple autoinc/autodec: the referenced range is one mode-size
	   on either side of the base register.  */
	if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
	    || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
	  {
	    val.base = XEXP (addr, 0);
	    val.start = -GET_MODE_SIZE (GET_MODE (x));
	    val.end = GET_MODE_SIZE (GET_MODE (x));
	    val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
	    return val;
	  }

	/* {PRE,POST}_MODIFY with a (plus base const) update is treated
	   like the autoinc case above.  */
	if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
	  {
	    if (GET_CODE (XEXP (addr, 1)) == PLUS
		&& XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
		&& CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
	      {
		val.base  = XEXP (addr, 0);
		val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
		val.end   = INTVAL (XEXP (XEXP (addr, 1), 1));
		val.safe  = REGNO (val.base) == STACK_POINTER_REGNUM;
		return val;
	      }
	  }

	if (GET_CODE (addr) == CONST)
	  {
	    addr = XEXP (addr, 0);
	    all_const = 1;
	  }
	/* Split a PLUS address into base and constant offset,
	   whichever order the operands come in.  */
	if (GET_CODE (addr) == PLUS)
	  {
	    if (CONSTANT_P (XEXP (addr, 0)))
	      {
		base = XEXP (addr, 1);
		offset = XEXP (addr, 0);
	      }
	    else if (CONSTANT_P (XEXP (addr, 1)))
	      {
		base = XEXP (addr, 0);
		offset = XEXP (addr, 1);
	      }
	  }

	if (offset == 0)
	  {
	    base = addr;
	    offset = const0_rtx;
	  }
	if (GET_CODE (offset) == CONST)
	  offset = XEXP (offset, 0);
	/* Reduce OFFSET to a plain CONST_INT, folding any non-integer
	   part back into BASE.  */
	if (GET_CODE (offset) == PLUS)
	  {
	    if (CONST_INT_P (XEXP (offset, 0)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
		offset = XEXP (offset, 0);
	      }
	    else if (CONST_INT_P (XEXP (offset, 1)))
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
		offset = XEXP (offset, 1);
	      }
	    else
	      {
		base = gen_rtx_PLUS (GET_MODE (base), base, offset);
		offset = const0_rtx;
	      }
	  }
	else if (!CONST_INT_P (offset))
	  {
	    base = gen_rtx_PLUS (GET_MODE (base), base, offset);
	    offset = const0_rtx;
	  }

	/* Re-wrap in CONST if the whole address was constant.  */
	if (all_const && GET_CODE (base) == PLUS)
	  base = gen_rtx_CONST (GET_MODE (base), base);

	gcc_assert (CONST_INT_P (offset));

	val.start = INTVAL (offset);
	val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
	val.base = base;
      }
      break;

    case REG:
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	{
	  /* A pseudo with no hard reg.  */
	  val.start = REGNO (x);
	  val.end = val.start + 1;
	}
      else
	/* A hard reg.  */
	val.end = end_hard_regno (GET_MODE (x), val.start);
      break;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	/* This could be more precise, but it's good enough.  */
	return decompose (SUBREG_REG (x));
      val.reg_flag = 1;
      val.start = true_regnum (x);
      if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
	return decompose (SUBREG_REG (x));
      else
	/* A hard reg.  */
	val.end = val.start + subreg_nregs (x);
      break;

    case SCRATCH:
      /* This hasn't been assigned yet, so it can't conflict yet.  */
      val.safe = 1;
      break;

    default:
      gcc_assert (CONSTANT_P (x));
      val.safe = 1;
      break;
    }
  return val;
}
2540
2541 /* Return 1 if altering Y will not modify the value of X.
2542 Y is also described by YDATA, which should be decompose (Y). */
2543
2544 static int
2545 immune_p (rtx x, rtx y, struct decomposition ydata)
2546 {
2547 struct decomposition xdata;
2548
2549 if (ydata.reg_flag)
2550 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2551 if (ydata.safe)
2552 return 1;
2553
2554 gcc_assert (MEM_P (y));
2555 /* If Y is memory and X is not, Y can't affect X. */
2556 if (!MEM_P (x))
2557 return 1;
2558
2559 xdata = decompose (x);
2560
2561 if (! rtx_equal_p (xdata.base, ydata.base))
2562 {
2563 /* If bases are distinct symbolic constants, there is no overlap. */
2564 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2565 return 1;
2566 /* Constants and stack slots never overlap. */
2567 if (CONSTANT_P (xdata.base)
2568 && (ydata.base == frame_pointer_rtx
2569 || ydata.base == hard_frame_pointer_rtx
2570 || ydata.base == stack_pointer_rtx))
2571 return 1;
2572 if (CONSTANT_P (ydata.base)
2573 && (xdata.base == frame_pointer_rtx
2574 || xdata.base == hard_frame_pointer_rtx
2575 || xdata.base == stack_pointer_rtx))
2576 return 1;
2577 /* If either base is variable, we don't know anything. */
2578 return 0;
2579 }
2580
2581 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2582 }
2583
2584 /* Similar, but calls decompose. */
2585
2586 int
2587 safe_from_earlyclobber (rtx op, rtx clobber)
2588 {
2589 struct decomposition early_data;
2590
2591 early_data = decompose (clobber);
2592 return immune_p (op, clobber, early_data);
2593 }
2594 \f
2595 /* Main entry point of this file: search the body of INSN
2596 for values that need reloading and record them with push_reload.
2597 REPLACE nonzero means record also where the values occur
2598 so that subst_reloads can be used.
2599
2600 IND_LEVELS says how many levels of indirection are supported by this
2601 machine; a value of zero means that a memory reference is not a valid
2602 memory address.
2603
2604 LIVE_KNOWN says we have valid information about which hard
2605 regs are live at each point in the program; this is true when
2606 we are called from global_alloc but false when stupid register
2607 allocation has been done.
2608
2609 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2610 which is nonnegative if the reg has been commandeered for reloading into.
2611 It is copied into STATIC_RELOAD_REG_P and referenced from there
2612 by various subroutines.
2613
2614 Return TRUE if some operands need to be changed, because of swapping
2615 commutative operands, reg_equiv_address substitution, or whatever. */
2616
2617 int
2618 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2619 short *reload_reg_p)
2620 {
2621 int insn_code_number;
2622 int i, j;
2623 int noperands;
2624 /* These start out as the constraints for the insn
2625 and they are chewed up as we consider alternatives. */
2626 const char *constraints[MAX_RECOG_OPERANDS];
2627 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2628 a register. */
2629 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2630 char pref_or_nothing[MAX_RECOG_OPERANDS];
2631 /* Nonzero for a MEM operand whose entire address needs a reload.
2632 May be -1 to indicate the entire address may or may not need a reload. */
2633 int address_reloaded[MAX_RECOG_OPERANDS];
2634 /* Nonzero for an address operand that needs to be completely reloaded.
2635 May be -1 to indicate the entire operand may or may not need a reload. */
2636 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2637 /* Value of enum reload_type to use for operand. */
2638 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2639 /* Value of enum reload_type to use within address of operand. */
2640 enum reload_type address_type[MAX_RECOG_OPERANDS];
2641 /* Save the usage of each operand. */
2642 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2643 int no_input_reloads = 0, no_output_reloads = 0;
2644 int n_alternatives;
2645 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2646 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2647 char this_alternative_win[MAX_RECOG_OPERANDS];
2648 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2649 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2650 int this_alternative_matches[MAX_RECOG_OPERANDS];
2651 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2652 int this_alternative_number;
2653 int goal_alternative_number = 0;
2654 int operand_reloadnum[MAX_RECOG_OPERANDS];
2655 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2656 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2657 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2658 char goal_alternative_win[MAX_RECOG_OPERANDS];
2659 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2660 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2661 int goal_alternative_swapped;
2662 int best;
2663 int commutative;
2664 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2665 rtx substed_operand[MAX_RECOG_OPERANDS];
2666 rtx body = PATTERN (insn);
2667 rtx set = single_set (insn);
2668 int goal_earlyclobber = 0, this_earlyclobber;
2669 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
2670 int retval = 0;
2671
2672 this_insn = insn;
2673 n_reloads = 0;
2674 n_replacements = 0;
2675 n_earlyclobbers = 0;
2676 replace_reloads = replace;
2677 hard_regs_live_known = live_known;
2678 static_reload_reg_p = reload_reg_p;
2679
2680 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2681 neither are insns that SET cc0. Insns that use CC0 are not allowed
2682 to have any input reloads. */
2683 if (JUMP_P (insn) || CALL_P (insn))
2684 no_output_reloads = 1;
2685
2686 #ifdef HAVE_cc0
2687 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2688 no_input_reloads = 1;
2689 if (reg_set_p (cc0_rtx, PATTERN (insn)))
2690 no_output_reloads = 1;
2691 #endif
2692
2693 #ifdef SECONDARY_MEMORY_NEEDED
2694 /* The eliminated forms of any secondary memory locations are per-insn, so
2695 clear them out here. */
2696
2697 if (secondary_memlocs_elim_used)
2698 {
2699 memset (secondary_memlocs_elim, 0,
2700 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2701 secondary_memlocs_elim_used = 0;
2702 }
2703 #endif
2704
2705 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2706 is cheap to move between them. If it is not, there may not be an insn
2707 to do the copy, so we may need a reload. */
2708 if (GET_CODE (body) == SET
2709 && REG_P (SET_DEST (body))
2710 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2711 && REG_P (SET_SRC (body))
2712 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2713 && register_move_cost (GET_MODE (SET_SRC (body)),
2714 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2715 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2716 return 0;
2717
2718 extract_insn (insn);
2719
2720 noperands = reload_n_operands = recog_data.n_operands;
2721 n_alternatives = recog_data.n_alternatives;
2722
2723 /* Just return "no reloads" if insn has no operands with constraints. */
2724 if (noperands == 0 || n_alternatives == 0)
2725 return 0;
2726
2727 insn_code_number = INSN_CODE (insn);
2728 this_insn_is_asm = insn_code_number < 0;
2729
2730 memcpy (operand_mode, recog_data.operand_mode,
2731 noperands * sizeof (enum machine_mode));
2732 memcpy (constraints, recog_data.constraints,
2733 noperands * sizeof (const char *));
2734
2735 commutative = -1;
2736
2737 /* If we will need to know, later, whether some pair of operands
2738 are the same, we must compare them now and save the result.
2739 Reloading the base and index registers will clobber them
2740 and afterward they will fail to match. */
2741
2742 for (i = 0; i < noperands; i++)
2743 {
2744 const char *p;
2745 int c;
2746 char *end;
2747
2748 substed_operand[i] = recog_data.operand[i];
2749 p = constraints[i];
2750
2751 modified[i] = RELOAD_READ;
2752
2753 /* Scan this operand's constraint to see if it is an output operand,
2754 an in-out operand, is commutative, or should match another. */
2755
2756 while ((c = *p))
2757 {
2758 p += CONSTRAINT_LEN (c, p);
2759 switch (c)
2760 {
2761 case '=':
2762 modified[i] = RELOAD_WRITE;
2763 break;
2764 case '+':
2765 modified[i] = RELOAD_READ_WRITE;
2766 break;
2767 case '%':
2768 {
2769 /* The last operand should not be marked commutative. */
2770 gcc_assert (i != noperands - 1);
2771
2772 /* We currently only support one commutative pair of
2773 operands. Some existing asm code currently uses more
2774 than one pair. Previously, that would usually work,
2775 but sometimes it would crash the compiler. We
2776 continue supporting that case as well as we can by
2777 silently ignoring all but the first pair. In the
2778 future we may handle it correctly. */
2779 if (commutative < 0)
2780 commutative = i;
2781 else
2782 gcc_assert (this_insn_is_asm);
2783 }
2784 break;
2785 /* Use of ISDIGIT is tempting here, but it may get expensive because
2786 of locale support we don't want. */
2787 case '0': case '1': case '2': case '3': case '4':
2788 case '5': case '6': case '7': case '8': case '9':
2789 {
2790 c = strtoul (p - 1, &end, 10);
2791 p = end;
2792
2793 operands_match[c][i]
2794 = operands_match_p (recog_data.operand[c],
2795 recog_data.operand[i]);
2796
2797 /* An operand may not match itself. */
2798 gcc_assert (c != i);
2799
2800 /* If C can be commuted with C+1, and C might need to match I,
2801 then C+1 might also need to match I. */
2802 if (commutative >= 0)
2803 {
2804 if (c == commutative || c == commutative + 1)
2805 {
2806 int other = c + (c == commutative ? 1 : -1);
2807 operands_match[other][i]
2808 = operands_match_p (recog_data.operand[other],
2809 recog_data.operand[i]);
2810 }
2811 if (i == commutative || i == commutative + 1)
2812 {
2813 int other = i + (i == commutative ? 1 : -1);
2814 operands_match[c][other]
2815 = operands_match_p (recog_data.operand[c],
2816 recog_data.operand[other]);
2817 }
2818 /* Note that C is supposed to be less than I.
2819 No need to consider altering both C and I because in
2820 that case we would alter one into the other. */
2821 }
2822 }
2823 }
2824 }
2825 }
2826
2827 /* Examine each operand that is a memory reference or memory address
2828 and reload parts of the addresses into index registers.
2829 Also here any references to pseudo regs that didn't get hard regs
2830 but are equivalent to constants get replaced in the insn itself
2831 with those constants. Nobody will ever see them again.
2832
2833 Finally, set up the preferred classes of each operand. */
2834
2835 for (i = 0; i < noperands; i++)
2836 {
2837 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2838
2839 address_reloaded[i] = 0;
2840 address_operand_reloaded[i] = 0;
2841 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2842 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2843 : RELOAD_OTHER);
2844 address_type[i]
2845 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2846 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2847 : RELOAD_OTHER);
2848
2849 if (*constraints[i] == 0)
2850 /* Ignore things like match_operator operands. */
2851 ;
2852 else if (insn_extra_address_constraint
2853 (lookup_constraint (constraints[i])))
2854 {
2855 address_operand_reloaded[i]
2856 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2857 recog_data.operand[i],
2858 recog_data.operand_loc[i],
2859 i, operand_type[i], ind_levels, insn);
2860
2861 /* If we now have a simple operand where we used to have a
2862 PLUS or MULT, re-recognize and try again. */
2863 if ((OBJECT_P (*recog_data.operand_loc[i])
2864 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2865 && (GET_CODE (recog_data.operand[i]) == MULT
2866 || GET_CODE (recog_data.operand[i]) == PLUS))
2867 {
2868 INSN_CODE (insn) = -1;
2869 retval = find_reloads (insn, replace, ind_levels, live_known,
2870 reload_reg_p);
2871 return retval;
2872 }
2873
2874 recog_data.operand[i] = *recog_data.operand_loc[i];
2875 substed_operand[i] = recog_data.operand[i];
2876
2877 /* Address operands are reloaded in their existing mode,
2878 no matter what is specified in the machine description. */
2879 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2880
2881 /* If the address is a single CONST_INT pick address mode
2882 instead otherwise we will later not know in which mode
2883 the reload should be performed. */
2884 if (operand_mode[i] == VOIDmode)
2885 operand_mode[i] = Pmode;
2886
2887 }
2888 else if (code == MEM)
2889 {
2890 address_reloaded[i]
2891 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2892 recog_data.operand_loc[i],
2893 XEXP (recog_data.operand[i], 0),
2894 &XEXP (recog_data.operand[i], 0),
2895 i, address_type[i], ind_levels, insn);
2896 recog_data.operand[i] = *recog_data.operand_loc[i];
2897 substed_operand[i] = recog_data.operand[i];
2898 }
2899 else if (code == SUBREG)
2900 {
2901 rtx reg = SUBREG_REG (recog_data.operand[i]);
2902 rtx op
2903 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2904 ind_levels,
2905 set != 0
2906 && &SET_DEST (set) == recog_data.operand_loc[i],
2907 insn,
2908 &address_reloaded[i]);
2909
2910 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2911 that didn't get a hard register, emit a USE with a REG_EQUAL
2912 note in front so that we might inherit a previous, possibly
2913 wider reload. */
2914
2915 if (replace
2916 && MEM_P (op)
2917 && REG_P (reg)
2918 && (GET_MODE_SIZE (GET_MODE (reg))
2919 >= GET_MODE_SIZE (GET_MODE (op)))
2920 && reg_equiv_constant (REGNO (reg)) == 0)
2921 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2922 insn),
2923 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2924
2925 substed_operand[i] = recog_data.operand[i] = op;
2926 }
2927 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2928 /* We can get a PLUS as an "operand" as a result of register
2929 elimination. See eliminate_regs and gen_reload. We handle
2930 a unary operator by reloading the operand. */
2931 substed_operand[i] = recog_data.operand[i]
2932 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2933 ind_levels, 0, insn,
2934 &address_reloaded[i]);
2935 else if (code == REG)
2936 {
2937 /* This is equivalent to calling find_reloads_toplev.
2938 The code is duplicated for speed.
2939 When we find a pseudo always equivalent to a constant,
2940 we replace it by the constant. We must be sure, however,
2941 that we don't try to replace it in the insn in which it
2942 is being set. */
2943 int regno = REGNO (recog_data.operand[i]);
2944 if (reg_equiv_constant (regno) != 0
2945 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2946 {
2947 /* Record the existing mode so that the check if constants are
2948 allowed will work when operand_mode isn't specified. */
2949
2950 if (operand_mode[i] == VOIDmode)
2951 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2952
2953 substed_operand[i] = recog_data.operand[i]
2954 = reg_equiv_constant (regno);
2955 }
2956 if (reg_equiv_memory_loc (regno) != 0
2957 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2958 /* We need not give a valid is_set_dest argument since the case
2959 of a constant equivalence was checked above. */
2960 substed_operand[i] = recog_data.operand[i]
2961 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2962 ind_levels, 0, insn,
2963 &address_reloaded[i]);
2964 }
2965 /* If the operand is still a register (we didn't replace it with an
2966 equivalent), get the preferred class to reload it into. */
2967 code = GET_CODE (recog_data.operand[i]);
2968 preferred_class[i]
2969 = ((code == REG && REGNO (recog_data.operand[i])
2970 >= FIRST_PSEUDO_REGISTER)
2971 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2972 : NO_REGS);
2973 pref_or_nothing[i]
2974 = (code == REG
2975 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2976 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2977 }
2978
2979 /* If this is simply a copy from operand 1 to operand 0, merge the
2980 preferred classes for the operands. */
2981 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2982 && recog_data.operand[1] == SET_SRC (set))
2983 {
2984 preferred_class[0] = preferred_class[1]
2985 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2986 pref_or_nothing[0] |= pref_or_nothing[1];
2987 pref_or_nothing[1] |= pref_or_nothing[0];
2988 }
2989
2990 /* Now see what we need for pseudo-regs that didn't get hard regs
2991 or got the wrong kind of hard reg. For this, we must consider
2992 all the operands together against the register constraints. */
2993
2994 best = MAX_RECOG_OPERANDS * 2 + 600;
2995
2996 goal_alternative_swapped = 0;
2997
2998 /* The constraints are made of several alternatives.
2999 Each operand's constraint looks like foo,bar,... with commas
3000 separating the alternatives. The first alternatives for all
3001 operands go together, the second alternatives go together, etc.
3002
3003 First loop over alternatives. */
3004
3005 alternative_mask enabled = get_enabled_alternatives (insn);
3006 for (this_alternative_number = 0;
3007 this_alternative_number < n_alternatives;
3008 this_alternative_number++)
3009 {
3010 int swapped;
3011
3012 if (!TEST_BIT (enabled, this_alternative_number))
3013 {
3014 int i;
3015
3016 for (i = 0; i < recog_data.n_operands; i++)
3017 constraints[i] = skip_alternative (constraints[i]);
3018
3019 continue;
3020 }
3021
3022 /* If insn is commutative (it's safe to exchange a certain pair
3023 of operands) then we need to try each alternative twice, the
3024 second time matching those two operands as if we had
3025 exchanged them. To do this, really exchange them in
3026 operands. */
3027 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3028 {
3029 /* Loop over operands for one constraint alternative. */
3030 /* LOSERS counts those that don't fit this alternative
3031 and would require loading. */
3032 int losers = 0;
3033 /* BAD is set to 1 if it some operand can't fit this alternative
3034 even after reloading. */
3035 int bad = 0;
3036 /* REJECT is a count of how undesirable this alternative says it is
3037 if any reloading is required. If the alternative matches exactly
3038 then REJECT is ignored, but otherwise it gets this much
3039 counted against it in addition to the reloading needed. Each
3040 ? counts three times here since we want the disparaging caused by
3041 a bad register class to only count 1/3 as much. */
3042 int reject = 0;
3043
3044 if (swapped)
3045 {
3046 enum reg_class tclass;
3047 int t;
3048
3049 recog_data.operand[commutative] = substed_operand[commutative + 1];
3050 recog_data.operand[commutative + 1] = substed_operand[commutative];
3051 /* Swap the duplicates too. */
3052 for (i = 0; i < recog_data.n_dups; i++)
3053 if (recog_data.dup_num[i] == commutative
3054 || recog_data.dup_num[i] == commutative + 1)
3055 *recog_data.dup_loc[i]
3056 = recog_data.operand[(int) recog_data.dup_num[i]];
3057
3058 tclass = preferred_class[commutative];
3059 preferred_class[commutative] = preferred_class[commutative + 1];
3060 preferred_class[commutative + 1] = tclass;
3061
3062 t = pref_or_nothing[commutative];
3063 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3064 pref_or_nothing[commutative + 1] = t;
3065
3066 t = address_reloaded[commutative];
3067 address_reloaded[commutative] = address_reloaded[commutative + 1];
3068 address_reloaded[commutative + 1] = t;
3069 }
3070
3071 this_earlyclobber = 0;
3072
3073 for (i = 0; i < noperands; i++)
3074 {
3075 const char *p = constraints[i];
3076 char *end;
3077 int len;
3078 int win = 0;
3079 int did_match = 0;
3080 /* 0 => this operand can be reloaded somehow for this alternative. */
3081 int badop = 1;
3082 /* 0 => this operand can be reloaded if the alternative allows regs. */
3083 int winreg = 0;
3084 int c;
3085 int m;
3086 rtx operand = recog_data.operand[i];
3087 int offset = 0;
3088 /* Nonzero means this is a MEM that must be reloaded into a reg
3089 regardless of what the constraint says. */
3090 int force_reload = 0;
3091 int offmemok = 0;
3092 /* Nonzero if a constant forced into memory would be OK for this
3093 operand. */
3094 int constmemok = 0;
3095 int earlyclobber = 0;
3096 enum constraint_num cn;
3097 enum reg_class cl;
3098
3099 /* If the predicate accepts a unary operator, it means that
3100 we need to reload the operand, but do not do this for
3101 match_operator and friends. */
3102 if (UNARY_P (operand) && *p != 0)
3103 operand = XEXP (operand, 0);
3104
3105 /* If the operand is a SUBREG, extract
3106 the REG or MEM (or maybe even a constant) within.
3107 (Constants can occur as a result of reg_equiv_constant.) */
3108
3109 while (GET_CODE (operand) == SUBREG)
3110 {
3111 /* Offset only matters when operand is a REG and
3112 it is a hard reg. This is because it is passed
3113 to reg_fits_class_p if it is a REG and all pseudos
3114 return 0 from that function. */
3115 if (REG_P (SUBREG_REG (operand))
3116 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3117 {
3118 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3119 GET_MODE (SUBREG_REG (operand)),
3120 SUBREG_BYTE (operand),
3121 GET_MODE (operand)) < 0)
3122 force_reload = 1;
3123 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3124 GET_MODE (SUBREG_REG (operand)),
3125 SUBREG_BYTE (operand),
3126 GET_MODE (operand));
3127 }
3128 operand = SUBREG_REG (operand);
3129 /* Force reload if this is a constant or PLUS or if there may
3130 be a problem accessing OPERAND in the outer mode. */
3131 if (CONSTANT_P (operand)
3132 || GET_CODE (operand) == PLUS
3133 /* We must force a reload of paradoxical SUBREGs
3134 of a MEM because the alignment of the inner value
3135 may not be enough to do the outer reference. On
3136 big-endian machines, it may also reference outside
3137 the object.
3138
3139 On machines that extend byte operations and we have a
3140 SUBREG where both the inner and outer modes are no wider
3141 than a word and the inner mode is narrower, is integral,
3142 and gets extended when loaded from memory, combine.c has
3143 made assumptions about the behavior of the machine in such
3144 register access. If the data is, in fact, in memory we
3145 must always load using the size assumed to be in the
3146 register and let the insn do the different-sized
3147 accesses.
3148
3149 This is doubly true if WORD_REGISTER_OPERATIONS. In
3150 this case eliminate_regs has left non-paradoxical
3151 subregs for push_reload to see. Make sure it does
3152 by forcing the reload.
3153
3154 ??? When is it right at this stage to have a subreg
3155 of a mem that is _not_ to be handled specially? IMO
3156 those should have been reduced to just a mem. */
3157 || ((MEM_P (operand)
3158 || (REG_P (operand)
3159 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3160 #ifndef WORD_REGISTER_OPERATIONS
3161 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3162 < BIGGEST_ALIGNMENT)
3163 && (GET_MODE_SIZE (operand_mode[i])
3164 > GET_MODE_SIZE (GET_MODE (operand))))
3165 || BYTES_BIG_ENDIAN
3166 #ifdef LOAD_EXTEND_OP
3167 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3168 && (GET_MODE_SIZE (GET_MODE (operand))
3169 <= UNITS_PER_WORD)
3170 && (GET_MODE_SIZE (operand_mode[i])
3171 > GET_MODE_SIZE (GET_MODE (operand)))
3172 && INTEGRAL_MODE_P (GET_MODE (operand))
3173 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3174 #endif
3175 )
3176 #endif
3177 )
3178 )
3179 force_reload = 1;
3180 }
3181
3182 this_alternative[i] = NO_REGS;
3183 this_alternative_win[i] = 0;
3184 this_alternative_match_win[i] = 0;
3185 this_alternative_offmemok[i] = 0;
3186 this_alternative_earlyclobber[i] = 0;
3187 this_alternative_matches[i] = -1;
3188
3189 /* An empty constraint or empty alternative
3190 allows anything which matched the pattern. */
3191 if (*p == 0 || *p == ',')
3192 win = 1, badop = 0;
3193
3194 /* Scan this alternative's specs for this operand;
3195 set WIN if the operand fits any letter in this alternative.
3196 Otherwise, clear BADOP if this operand could
3197 fit some letter after reloads,
3198 or set WINREG if this operand could fit after reloads
3199 provided the constraint allows some registers. */
3200
3201 do
3202 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3203 {
3204 case '\0':
3205 len = 0;
3206 break;
3207 case ',':
3208 c = '\0';
3209 break;
3210
3211 case '?':
3212 reject += 6;
3213 break;
3214
3215 case '!':
3216 reject = 600;
3217 break;
3218
3219 case '#':
3220 /* Ignore rest of this alternative as far as
3221 reloading is concerned. */
3222 do
3223 p++;
3224 while (*p && *p != ',');
3225 len = 0;
3226 break;
3227
3228 case '0': case '1': case '2': case '3': case '4':
3229 case '5': case '6': case '7': case '8': case '9':
3230 m = strtoul (p, &end, 10);
3231 p = end;
3232 len = 0;
3233
3234 this_alternative_matches[i] = m;
3235 /* We are supposed to match a previous operand.
3236 If we do, we win if that one did.
3237 If we do not, count both of the operands as losers.
3238 (This is too conservative, since most of the time
3239 only a single reload insn will be needed to make
3240 the two operands win. As a result, this alternative
3241 may be rejected when it is actually desirable.) */
3242 if ((swapped && (m != commutative || i != commutative + 1))
3243 /* If we are matching as if two operands were swapped,
3244 also pretend that operands_match had been computed
3245 with swapped.
3246 But if I is the second of those and C is the first,
3247 don't exchange them, because operands_match is valid
3248 only on one side of its diagonal. */
3249 ? (operands_match
3250 [(m == commutative || m == commutative + 1)
3251 ? 2 * commutative + 1 - m : m]
3252 [(i == commutative || i == commutative + 1)
3253 ? 2 * commutative + 1 - i : i])
3254 : operands_match[m][i])
3255 {
3256 /* If we are matching a non-offsettable address where an
3257 offsettable address was expected, then we must reject
3258 this combination, because we can't reload it. */
3259 if (this_alternative_offmemok[m]
3260 && MEM_P (recog_data.operand[m])
3261 && this_alternative[m] == NO_REGS
3262 && ! this_alternative_win[m])
3263 bad = 1;
3264
3265 did_match = this_alternative_win[m];
3266 }
3267 else
3268 {
3269 /* Operands don't match. */
3270 rtx value;
3271 int loc1, loc2;
3272 /* Retroactively mark the operand we had to match
3273 as a loser, if it wasn't already. */
3274 if (this_alternative_win[m])
3275 losers++;
3276 this_alternative_win[m] = 0;
3277 if (this_alternative[m] == NO_REGS)
3278 bad = 1;
3279 /* But count the pair only once in the total badness of
3280 this alternative, if the pair can be a dummy reload.
3281 The pointers in operand_loc are not swapped; swap
3282 them by hand if necessary. */
3283 if (swapped && i == commutative)
3284 loc1 = commutative + 1;
3285 else if (swapped && i == commutative + 1)
3286 loc1 = commutative;
3287 else
3288 loc1 = i;
3289 if (swapped && m == commutative)
3290 loc2 = commutative + 1;
3291 else if (swapped && m == commutative + 1)
3292 loc2 = commutative;
3293 else
3294 loc2 = m;
3295 value
3296 = find_dummy_reload (recog_data.operand[i],
3297 recog_data.operand[m],
3298 recog_data.operand_loc[loc1],
3299 recog_data.operand_loc[loc2],
3300 operand_mode[i], operand_mode[m],
3301 this_alternative[m], -1,
3302 this_alternative_earlyclobber[m]);
3303
3304 if (value != 0)
3305 losers--;
3306 }
3307 /* This can be fixed with reloads if the operand
3308 we are supposed to match can be fixed with reloads. */
3309 badop = 0;
3310 this_alternative[i] = this_alternative[m];
3311
3312 /* If we have to reload this operand and some previous
3313 operand also had to match the same thing as this
3314 operand, we don't know how to do that. So reject this
3315 alternative. */
3316 if (! did_match || force_reload)
3317 for (j = 0; j < i; j++)
3318 if (this_alternative_matches[j]
3319 == this_alternative_matches[i])
3320 {
3321 badop = 1;
3322 break;
3323 }
3324 break;
3325
3326 case 'p':
3327 /* All necessary reloads for an address_operand
3328 were handled in find_reloads_address. */
3329 this_alternative[i]
3330 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3331 ADDRESS, SCRATCH);
3332 win = 1;
3333 badop = 0;
3334 break;
3335
3336 case TARGET_MEM_CONSTRAINT:
3337 if (force_reload)
3338 break;
3339 if (MEM_P (operand)
3340 || (REG_P (operand)
3341 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3342 && reg_renumber[REGNO (operand)] < 0))
3343 win = 1;
3344 if (CONST_POOL_OK_P (operand_mode[i], operand))
3345 badop = 0;
3346 constmemok = 1;
3347 break;
3348
3349 case '<':
3350 if (MEM_P (operand)
3351 && ! address_reloaded[i]
3352 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3353 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3354 win = 1;
3355 break;
3356
3357 case '>':
3358 if (MEM_P (operand)
3359 && ! address_reloaded[i]
3360 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3361 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3362 win = 1;
3363 break;
3364
3365 /* Memory operand whose address is not offsettable. */
3366 case 'V':
3367 if (force_reload)
3368 break;
3369 if (MEM_P (operand)
3370 && ! (ind_levels ? offsettable_memref_p (operand)
3371 : offsettable_nonstrict_memref_p (operand))
3372 /* Certain mem addresses will become offsettable
3373 after they themselves are reloaded. This is important;
3374 we don't want our own handling of unoffsettables
3375 to override the handling of reg_equiv_address. */
3376 && !(REG_P (XEXP (operand, 0))
3377 && (ind_levels == 0
3378 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3379 win = 1;
3380 break;
3381
3382 /* Memory operand whose address is offsettable. */
3383 case 'o':
3384 if (force_reload)
3385 break;
3386 if ((MEM_P (operand)
3387 /* If IND_LEVELS, find_reloads_address won't reload a
3388 pseudo that didn't get a hard reg, so we have to
3389 reject that case. */
3390 && ((ind_levels ? offsettable_memref_p (operand)
3391 : offsettable_nonstrict_memref_p (operand))
3392 /* A reloaded address is offsettable because it is now
3393 just a simple register indirect. */
3394 || address_reloaded[i] == 1))
3395 || (REG_P (operand)
3396 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3397 && reg_renumber[REGNO (operand)] < 0
3398 /* If reg_equiv_address is nonzero, we will be
3399 loading it into a register; hence it will be
3400 offsettable, but we cannot say that reg_equiv_mem
3401 is offsettable without checking. */
3402 && ((reg_equiv_mem (REGNO (operand)) != 0
3403 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3404 || (reg_equiv_address (REGNO (operand)) != 0))))
3405 win = 1;
3406 if (CONST_POOL_OK_P (operand_mode[i], operand)
3407 || MEM_P (operand))
3408 badop = 0;
3409 constmemok = 1;
3410 offmemok = 1;
3411 break;
3412
3413 case '&':
3414 /* Output operand that is stored before the need for the
3415 input operands (and their index registers) is over. */
3416 earlyclobber = 1, this_earlyclobber = 1;
3417 break;
3418
3419 case 'X':
3420 force_reload = 0;
3421 win = 1;
3422 break;
3423
3424 case 'g':
3425 if (! force_reload
3426 /* A PLUS is never a valid operand, but reload can make
3427 it from a register when eliminating registers. */
3428 && GET_CODE (operand) != PLUS
3429 /* A SCRATCH is not a valid operand. */
3430 && GET_CODE (operand) != SCRATCH
3431 && (! CONSTANT_P (operand)
3432 || ! flag_pic
3433 || LEGITIMATE_PIC_OPERAND_P (operand))
3434 && (GENERAL_REGS == ALL_REGS
3435 || !REG_P (operand)
3436 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3437 && reg_renumber[REGNO (operand)] < 0)))
3438 win = 1;
3439 cl = GENERAL_REGS;
3440 goto reg;
3441
3442 default:
3443 cn = lookup_constraint (p);
3444 switch (get_constraint_type (cn))
3445 {
3446 case CT_REGISTER:
3447 cl = reg_class_for_constraint (cn);
3448 if (cl != NO_REGS)
3449 goto reg;
3450 break;
3451
3452 case CT_CONST_INT:
3453 if (CONST_INT_P (operand)
3454 && (insn_const_int_ok_for_constraint
3455 (INTVAL (operand), cn)))
3456 win = true;
3457 break;
3458
3459 case CT_MEMORY:
3460 if (force_reload)
3461 break;
3462 if (constraint_satisfied_p (operand, cn))
3463 win = 1;
3464 /* If the address was already reloaded,
3465 we win as well. */
3466 else if (MEM_P (operand) && address_reloaded[i] == 1)
3467 win = 1;
3468 /* Likewise if the address will be reloaded because
3469 reg_equiv_address is nonzero. For reg_equiv_mem
3470 we have to check. */
3471 else if (REG_P (operand)
3472 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3473 && reg_renumber[REGNO (operand)] < 0
3474 && ((reg_equiv_mem (REGNO (operand)) != 0
3475 && (constraint_satisfied_p
3476 (reg_equiv_mem (REGNO (operand)),
3477 cn)))
3478 || (reg_equiv_address (REGNO (operand))
3479 != 0)))
3480 win = 1;
3481
3482 /* If we didn't already win, we can reload
3483 constants via force_const_mem, and other
3484 MEMs by reloading the address like for 'o'. */
3485 if (CONST_POOL_OK_P (operand_mode[i], operand)
3486 || MEM_P (operand))
3487 badop = 0;
3488 constmemok = 1;
3489 offmemok = 1;
3490 break;
3491
3492 case CT_ADDRESS:
3493 if (constraint_satisfied_p (operand, cn))
3494 win = 1;
3495
3496 /* If we didn't already win, we can reload
3497 the address into a base register. */
3498 this_alternative[i]
3499 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3500 ADDRESS, SCRATCH);
3501 badop = 0;
3502 break;
3503
3504 case CT_FIXED_FORM:
3505 if (constraint_satisfied_p (operand, cn))
3506 win = 1;
3507 break;
3508 }
3509 break;
3510
3511 reg:
3512 this_alternative[i]
3513 = reg_class_subunion[this_alternative[i]][cl];
3514 if (GET_MODE (operand) == BLKmode)
3515 break;
3516 winreg = 1;
3517 if (REG_P (operand)
3518 && reg_fits_class_p (operand, this_alternative[i],
3519 offset, GET_MODE (recog_data.operand[i])))
3520 win = 1;
3521 break;
3522 }
3523 while ((p += len), c);
3524
3525 if (swapped == (commutative >= 0 ? 1 : 0))
3526 constraints[i] = p;
3527
3528 /* If this operand could be handled with a reg,
3529 and some reg is allowed, then this operand can be handled. */
3530 if (winreg && this_alternative[i] != NO_REGS
3531 && (win || !class_only_fixed_regs[this_alternative[i]]))
3532 badop = 0;
3533
3534 /* Record which operands fit this alternative. */
3535 this_alternative_earlyclobber[i] = earlyclobber;
3536 if (win && ! force_reload)
3537 this_alternative_win[i] = 1;
3538 else if (did_match && ! force_reload)
3539 this_alternative_match_win[i] = 1;
3540 else
3541 {
3542 int const_to_mem = 0;
3543
3544 this_alternative_offmemok[i] = offmemok;
3545 losers++;
3546 if (badop)
3547 bad = 1;
3548 /* Alternative loses if it has no regs for a reg operand. */
3549 if (REG_P (operand)
3550 && this_alternative[i] == NO_REGS
3551 && this_alternative_matches[i] < 0)
3552 bad = 1;
3553
3554 /* If this is a constant that is reloaded into the desired
3555 class by copying it to memory first, count that as another
3556 reload. This is consistent with other code and is
3557 required to avoid choosing another alternative when
3558 the constant is moved into memory by this function on
3559 an early reload pass. Note that the test here is
3560 precisely the same as in the code below that calls
3561 force_const_mem. */
3562 if (CONST_POOL_OK_P (operand_mode[i], operand)
3563 && ((targetm.preferred_reload_class (operand,
3564 this_alternative[i])
3565 == NO_REGS)
3566 || no_input_reloads))
3567 {
3568 const_to_mem = 1;
3569 if (this_alternative[i] != NO_REGS)
3570 losers++;
3571 }
3572
3573 /* Alternative loses if it requires a type of reload not
3574 permitted for this insn. We can always reload SCRATCH
3575 and objects with a REG_UNUSED note. */
3576 if (GET_CODE (operand) != SCRATCH
3577 && modified[i] != RELOAD_READ && no_output_reloads
3578 && ! find_reg_note (insn, REG_UNUSED, operand))
3579 bad = 1;
3580 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3581 && ! const_to_mem)
3582 bad = 1;
3583
3584 /* If we can't reload this value at all, reject this
3585 alternative. Note that we could also lose due to
3586 LIMIT_RELOAD_CLASS, but we don't check that
3587 here. */
3588
3589 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3590 {
3591 if (targetm.preferred_reload_class (operand,
3592 this_alternative[i])
3593 == NO_REGS)
3594 reject = 600;
3595
3596 if (operand_type[i] == RELOAD_FOR_OUTPUT
3597 && (targetm.preferred_output_reload_class (operand,
3598 this_alternative[i])
3599 == NO_REGS))
3600 reject = 600;
3601 }
3602
3603 /* We prefer to reload pseudos over reloading other things,
3604 since such reloads may be able to be eliminated later.
3605 If we are reloading a SCRATCH, we won't be generating any
3606 insns, just using a register, so it is also preferred.
3607 So bump REJECT in other cases. Don't do this in the
3608 case where we are forcing a constant into memory and
3609 it will then win since we don't want to have a different
3610 alternative match then. */
3611 if (! (REG_P (operand)
3612 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3613 && GET_CODE (operand) != SCRATCH
3614 && ! (const_to_mem && constmemok))
3615 reject += 2;
3616
3617 /* Input reloads can be inherited more often than output
3618 reloads can be removed, so penalize output reloads. */
3619 if (operand_type[i] != RELOAD_FOR_INPUT
3620 && GET_CODE (operand) != SCRATCH)
3621 reject++;
3622 }
3623
3624 /* If this operand is a pseudo register that didn't get
3625 a hard reg and this alternative accepts some
3626 register, see if the class that we want is a subset
3627 of the preferred class for this register. If not,
3628 but it intersects that class, use the preferred class
3629 instead. If it does not intersect the preferred
3630 class, show that usage of this alternative should be
3631 discouraged; it will be discouraged more still if the
3632 register is `preferred or nothing'. We do this
3633 because it increases the chance of reusing our spill
3634 register in a later insn and avoiding a pair of
3635 memory stores and loads.
3636
3637 Don't bother with this if this alternative will
3638 accept this operand.
3639
3640 Don't do this for a multiword operand, since it is
3641 only a small win and has the risk of requiring more
3642 spill registers, which could cause a large loss.
3643
3644 Don't do this if the preferred class has only one
3645 register because we might otherwise exhaust the
3646 class. */
3647
3648 if (! win && ! did_match
3649 && this_alternative[i] != NO_REGS
3650 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3651 && reg_class_size [(int) preferred_class[i]] > 0
3652 && ! small_register_class_p (preferred_class[i]))
3653 {
3654 if (! reg_class_subset_p (this_alternative[i],
3655 preferred_class[i]))
3656 {
3657 /* Since we don't have a way of forming the intersection,
3658 we just do something special if the preferred class
3659 is a subset of the class we have; that's the most
3660 common case anyway. */
3661 if (reg_class_subset_p (preferred_class[i],
3662 this_alternative[i]))
3663 this_alternative[i] = preferred_class[i];
3664 else
3665 reject += (2 + 2 * pref_or_nothing[i]);
3666 }
3667 }
3668 }
3669
3670 /* Now see if any output operands that are marked "earlyclobber"
3671 in this alternative conflict with any input operands
3672 or any memory addresses. */
3673
3674 for (i = 0; i < noperands; i++)
3675 if (this_alternative_earlyclobber[i]
3676 && (this_alternative_win[i] || this_alternative_match_win[i]))
3677 {
3678 struct decomposition early_data;
3679
3680 early_data = decompose (recog_data.operand[i]);
3681
3682 gcc_assert (modified[i] != RELOAD_READ);
3683
3684 if (this_alternative[i] == NO_REGS)
3685 {
3686 this_alternative_earlyclobber[i] = 0;
3687 gcc_assert (this_insn_is_asm);
3688 error_for_asm (this_insn,
3689 "%<&%> constraint used with no register class");
3690 }
3691
3692 for (j = 0; j < noperands; j++)
3693 /* Is this an input operand or a memory ref? */
3694 if ((MEM_P (recog_data.operand[j])
3695 || modified[j] != RELOAD_WRITE)
3696 && j != i
3697 /* Ignore things like match_operator operands. */
3698 && !recog_data.is_operator[j]
3699 /* Don't count an input operand that is constrained to match
3700 the early clobber operand. */
3701 && ! (this_alternative_matches[j] == i
3702 && rtx_equal_p (recog_data.operand[i],
3703 recog_data.operand[j]))
3704 /* Is it altered by storing the earlyclobber operand? */
3705 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3706 early_data))
3707 {
3708 /* If the output is in a non-empty few-regs class,
3709 it's costly to reload it, so reload the input instead. */
3710 if (small_register_class_p (this_alternative[i])
3711 && (REG_P (recog_data.operand[j])
3712 || GET_CODE (recog_data.operand[j]) == SUBREG))
3713 {
3714 losers++;
3715 this_alternative_win[j] = 0;
3716 this_alternative_match_win[j] = 0;
3717 }
3718 else
3719 break;
3720 }
3721 /* If an earlyclobber operand conflicts with something,
3722 it must be reloaded, so request this and count the cost. */
3723 if (j != noperands)
3724 {
3725 losers++;
3726 this_alternative_win[i] = 0;
3727 this_alternative_match_win[j] = 0;
3728 for (j = 0; j < noperands; j++)
3729 if (this_alternative_matches[j] == i
3730 && this_alternative_match_win[j])
3731 {
3732 this_alternative_win[j] = 0;
3733 this_alternative_match_win[j] = 0;
3734 losers++;
3735 }
3736 }
3737 }
3738
3739 /* If one alternative accepts all the operands, no reload required,
3740 choose that alternative; don't consider the remaining ones. */
3741 if (losers == 0)
3742 {
3743 /* Unswap these so that they are never swapped at `finish'. */
3744 if (swapped)
3745 {
3746 recog_data.operand[commutative] = substed_operand[commutative];
3747 recog_data.operand[commutative + 1]
3748 = substed_operand[commutative + 1];
3749 }
3750 for (i = 0; i < noperands; i++)
3751 {
3752 goal_alternative_win[i] = this_alternative_win[i];
3753 goal_alternative_match_win[i] = this_alternative_match_win[i];
3754 goal_alternative[i] = this_alternative[i];
3755 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3756 goal_alternative_matches[i] = this_alternative_matches[i];
3757 goal_alternative_earlyclobber[i]
3758 = this_alternative_earlyclobber[i];
3759 }
3760 goal_alternative_number = this_alternative_number;
3761 goal_alternative_swapped = swapped;
3762 goal_earlyclobber = this_earlyclobber;
3763 goto finish;
3764 }
3765
3766 /* REJECT, set by the ! and ? constraint characters and when a register
3767 would be reloaded into a non-preferred class, discourages the use of
3768 this alternative for a reload goal. REJECT is incremented by six
3769 for each ? and two for each non-preferred class. */
3770 losers = losers * 6 + reject;
3771
3772 /* If this alternative can be made to work by reloading,
3773 and it needs less reloading than the others checked so far,
3774 record it as the chosen goal for reloading. */
3775 if (! bad)
3776 {
3777 if (best > losers)
3778 {
3779 for (i = 0; i < noperands; i++)
3780 {
3781 goal_alternative[i] = this_alternative[i];
3782 goal_alternative_win[i] = this_alternative_win[i];
3783 goal_alternative_match_win[i]
3784 = this_alternative_match_win[i];
3785 goal_alternative_offmemok[i]
3786 = this_alternative_offmemok[i];
3787 goal_alternative_matches[i] = this_alternative_matches[i];
3788 goal_alternative_earlyclobber[i]
3789 = this_alternative_earlyclobber[i];
3790 }
3791 goal_alternative_swapped = swapped;
3792 best = losers;
3793 goal_alternative_number = this_alternative_number;
3794 goal_earlyclobber = this_earlyclobber;
3795 }
3796 }
3797
3798 if (swapped)
3799 {
3800 enum reg_class tclass;
3801 int t;
3802
3803 /* If the commutative operands have been swapped, swap
3804 them back in order to check the next alternative. */
3805 recog_data.operand[commutative] = substed_operand[commutative];
3806 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3807 /* Unswap the duplicates too. */
3808 for (i = 0; i < recog_data.n_dups; i++)
3809 if (recog_data.dup_num[i] == commutative
3810 || recog_data.dup_num[i] == commutative + 1)
3811 *recog_data.dup_loc[i]
3812 = recog_data.operand[(int) recog_data.dup_num[i]];
3813
3814 /* Unswap the operand related information as well. */
3815 tclass = preferred_class[commutative];
3816 preferred_class[commutative] = preferred_class[commutative + 1];
3817 preferred_class[commutative + 1] = tclass;
3818
3819 t = pref_or_nothing[commutative];
3820 pref_or_nothing[commutative] = pref_or_nothing[commutative + 1];
3821 pref_or_nothing[commutative + 1] = t;
3822
3823 t = address_reloaded[commutative];
3824 address_reloaded[commutative] = address_reloaded[commutative + 1];
3825 address_reloaded[commutative + 1] = t;
3826 }
3827 }
3828 }
3829
3830 /* The operands don't meet the constraints.
3831 goal_alternative describes the alternative
3832 that we could reach by reloading the fewest operands.
3833 Reload so as to fit it. */
3834
3835 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3836 {
3837 /* No alternative works with reloads?? */
3838 if (insn_code_number >= 0)
3839 fatal_insn ("unable to generate reloads for:", insn);
3840 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3841 /* Avoid further trouble with this insn. */
3842 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3843 n_reloads = 0;
3844 return 0;
3845 }
3846
3847 /* Jump to `finish' from above if all operands are valid already.
3848 In that case, goal_alternative_win is all 1. */
3849 finish:
3850
3851 /* Right now, for any pair of operands I and J that are required to match,
3852 with I < J,
3853 goal_alternative_matches[J] is I.
3854 Set up goal_alternative_matched as the inverse function:
3855 goal_alternative_matched[I] = J. */
3856
3857 for (i = 0; i < noperands; i++)
3858 goal_alternative_matched[i] = -1;
3859
3860 for (i = 0; i < noperands; i++)
3861 if (! goal_alternative_win[i]
3862 && goal_alternative_matches[i] >= 0)
3863 goal_alternative_matched[goal_alternative_matches[i]] = i;
3864
3865 for (i = 0; i < noperands; i++)
3866 goal_alternative_win[i] |= goal_alternative_match_win[i];
3867
3868 /* If the best alternative is with operands 1 and 2 swapped,
3869 consider them swapped before reporting the reloads. Update the
3870 operand numbers of any reloads already pushed. */
3871
3872 if (goal_alternative_swapped)
3873 {
3874 rtx tem;
3875
3876 tem = substed_operand[commutative];
3877 substed_operand[commutative] = substed_operand[commutative + 1];
3878 substed_operand[commutative + 1] = tem;
3879 tem = recog_data.operand[commutative];
3880 recog_data.operand[commutative] = recog_data.operand[commutative + 1];
3881 recog_data.operand[commutative + 1] = tem;
3882 tem = *recog_data.operand_loc[commutative];
3883 *recog_data.operand_loc[commutative]
3884 = *recog_data.operand_loc[commutative + 1];
3885 *recog_data.operand_loc[commutative + 1] = tem;
3886
3887 for (i = 0; i < n_reloads; i++)
3888 {
3889 if (rld[i].opnum == commutative)
3890 rld[i].opnum = commutative + 1;
3891 else if (rld[i].opnum == commutative + 1)
3892 rld[i].opnum = commutative;
3893 }
3894 }
3895
3896 for (i = 0; i < noperands; i++)
3897 {
3898 operand_reloadnum[i] = -1;
3899
3900 /* If this is an earlyclobber operand, we need to widen the scope.
3901 The reload must remain valid from the start of the insn being
3902 reloaded until after the operand is stored into its destination.
3903 We approximate this with RELOAD_OTHER even though we know that we
3904 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3905
3906 One special case that is worth checking is when we have an
3907 output that is earlyclobber but isn't used past the insn (typically
3908 a SCRATCH). In this case, we only need have the reload live
3909 through the insn itself, but not for any of our input or output
3910 reloads.
3911 But we must not accidentally narrow the scope of an existing
3912 RELOAD_OTHER reload - leave these alone.
3913
3914 In any case, anything needed to address this operand can remain
3915 however they were previously categorized. */
3916
3917 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3918 operand_type[i]
3919 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3920 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3921 }
3922
3923 /* Any constants that aren't allowed and can't be reloaded
3924 into registers are here changed into memory references. */
3925 for (i = 0; i < noperands; i++)
3926 if (! goal_alternative_win[i])
3927 {
3928 rtx op = recog_data.operand[i];
3929 rtx subreg = NULL_RTX;
3930 rtx plus = NULL_RTX;
3931 enum machine_mode mode = operand_mode[i];
3932
3933 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3934 push_reload so we have to let them pass here. */
3935 if (GET_CODE (op) == SUBREG)
3936 {
3937 subreg = op;
3938 op = SUBREG_REG (op);
3939 mode = GET_MODE (op);
3940 }
3941
3942 if (GET_CODE (op) == PLUS)
3943 {
3944 plus = op;
3945 op = XEXP (op, 1);
3946 }
3947
3948 if (CONST_POOL_OK_P (mode, op)
3949 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3950 == NO_REGS)
3951 || no_input_reloads))
3952 {
3953 int this_address_reloaded;
3954 rtx tem = force_const_mem (mode, op);
3955
3956 /* If we stripped a SUBREG or a PLUS above add it back. */
3957 if (plus != NULL_RTX)
3958 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3959
3960 if (subreg != NULL_RTX)
3961 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3962
3963 this_address_reloaded = 0;
3964 substed_operand[i] = recog_data.operand[i]
3965 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3966 0, insn, &this_address_reloaded);
3967
3968 /* If the alternative accepts constant pool refs directly
3969 there will be no reload needed at all. */
3970 if (plus == NULL_RTX
3971 && subreg == NULL_RTX
3972 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3973 ? substed_operand[i]
3974 : NULL,
3975 recog_data.constraints[i],
3976 goal_alternative_number))
3977 goal_alternative_win[i] = 1;
3978 }
3979 }
3980
3981 /* Record the values of the earlyclobber operands for the caller. */
3982 if (goal_earlyclobber)
3983 for (i = 0; i < noperands; i++)
3984 if (goal_alternative_earlyclobber[i])
3985 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3986
3987 /* Now record reloads for all the operands that need them. */
3988 for (i = 0; i < noperands; i++)
3989 if (! goal_alternative_win[i])
3990 {
3991 /* Operands that match previous ones have already been handled. */
3992 if (goal_alternative_matches[i] >= 0)
3993 ;
3994 /* Handle an operand with a nonoffsettable address
3995 appearing where an offsettable address will do
3996 by reloading the address into a base register.
3997
3998 ??? We can also do this when the operand is a register and
3999 reg_equiv_mem is not offsettable, but this is a bit tricky,
4000 so we don't bother with it. It may not be worth doing. */
4001 else if (goal_alternative_matched[i] == -1
4002 && goal_alternative_offmemok[i]
4003 && MEM_P (recog_data.operand[i]))
4004 {
4005 /* If the address to be reloaded is a VOIDmode constant,
4006 use the default address mode as mode of the reload register,
4007 as would have been done by find_reloads_address. */
4008 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4009 enum machine_mode address_mode;
4010
4011 address_mode = get_address_mode (recog_data.operand[i]);
4012 operand_reloadnum[i]
4013 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4014 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4015 base_reg_class (VOIDmode, as, MEM, SCRATCH),
4016 address_mode,
4017 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4018 rld[operand_reloadnum[i]].inc
4019 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4020
4021 /* If this operand is an output, we will have made any
4022 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4023 now we are treating part of the operand as an input, so
4024 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4025
4026 if (modified[i] == RELOAD_WRITE)
4027 {
4028 for (j = 0; j < n_reloads; j++)
4029 {
4030 if (rld[j].opnum == i)
4031 {
4032 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4033 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4034 else if (rld[j].when_needed
4035 == RELOAD_FOR_OUTADDR_ADDRESS)
4036 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4037 }
4038 }
4039 }
4040 }
4041 else if (goal_alternative_matched[i] == -1)
4042 {
4043 operand_reloadnum[i]
4044 = push_reload ((modified[i] != RELOAD_WRITE
4045 ? recog_data.operand[i] : 0),
4046 (modified[i] != RELOAD_READ
4047 ? recog_data.operand[i] : 0),
4048 (modified[i] != RELOAD_WRITE
4049 ? recog_data.operand_loc[i] : 0),
4050 (modified[i] != RELOAD_READ
4051 ? recog_data.operand_loc[i] : 0),
4052 (enum reg_class) goal_alternative[i],
4053 (modified[i] == RELOAD_WRITE
4054 ? VOIDmode : operand_mode[i]),
4055 (modified[i] == RELOAD_READ
4056 ? VOIDmode : operand_mode[i]),
4057 (insn_code_number < 0 ? 0
4058 : insn_data[insn_code_number].operand[i].strict_low),
4059 0, i, operand_type[i]);
4060 }
4061 /* In a matching pair of operands, one must be input only
4062 and the other must be output only.
4063 Pass the input operand as IN and the other as OUT. */
4064 else if (modified[i] == RELOAD_READ
4065 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4066 {
4067 operand_reloadnum[i]
4068 = push_reload (recog_data.operand[i],
4069 recog_data.operand[goal_alternative_matched[i]],
4070 recog_data.operand_loc[i],
4071 recog_data.operand_loc[goal_alternative_matched[i]],
4072 (enum reg_class) goal_alternative[i],
4073 operand_mode[i],
4074 operand_mode[goal_alternative_matched[i]],
4075 0, 0, i, RELOAD_OTHER);
4076 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4077 }
4078 else if (modified[i] == RELOAD_WRITE
4079 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4080 {
4081 operand_reloadnum[goal_alternative_matched[i]]
4082 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4083 recog_data.operand[i],
4084 recog_data.operand_loc[goal_alternative_matched[i]],
4085 recog_data.operand_loc[i],
4086 (enum reg_class) goal_alternative[i],
4087 operand_mode[goal_alternative_matched[i]],
4088 operand_mode[i],
4089 0, 0, i, RELOAD_OTHER);
4090 operand_reloadnum[i] = output_reloadnum;
4091 }
4092 else
4093 {
4094 gcc_assert (insn_code_number < 0);
4095 error_for_asm (insn, "inconsistent operand constraints "
4096 "in an %<asm%>");
4097 /* Avoid further trouble with this insn. */
4098 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4099 n_reloads = 0;
4100 return 0;
4101 }
4102 }
4103 else if (goal_alternative_matched[i] < 0
4104 && goal_alternative_matches[i] < 0
4105 && address_operand_reloaded[i] != 1
4106 && optimize)
4107 {
4108 /* For each non-matching operand that's a MEM or a pseudo-register
4109 that didn't get a hard register, make an optional reload.
4110 This may get done even if the insn needs no reloads otherwise. */
4111
4112 rtx operand = recog_data.operand[i];
4113
4114 while (GET_CODE (operand) == SUBREG)
4115 operand = SUBREG_REG (operand);
4116 if ((MEM_P (operand)
4117 || (REG_P (operand)
4118 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4119 /* If this is only for an output, the optional reload would not
4120 actually cause us to use a register now, just note that
4121 something is stored here. */
4122 && (goal_alternative[i] != NO_REGS
4123 || modified[i] == RELOAD_WRITE)
4124 && ! no_input_reloads
4125 /* An optional output reload might allow to delete INSN later.
4126 We mustn't make in-out reloads on insns that are not permitted
4127 output reloads.
4128 If this is an asm, we can't delete it; we must not even call
4129 push_reload for an optional output reload in this case,
4130 because we can't be sure that the constraint allows a register,
4131 and push_reload verifies the constraints for asms. */
4132 && (modified[i] == RELOAD_READ
4133 || (! no_output_reloads && ! this_insn_is_asm)))
4134 operand_reloadnum[i]
4135 = push_reload ((modified[i] != RELOAD_WRITE
4136 ? recog_data.operand[i] : 0),
4137 (modified[i] != RELOAD_READ
4138 ? recog_data.operand[i] : 0),
4139 (modified[i] != RELOAD_WRITE
4140 ? recog_data.operand_loc[i] : 0),
4141 (modified[i] != RELOAD_READ
4142 ? recog_data.operand_loc[i] : 0),
4143 (enum reg_class) goal_alternative[i],
4144 (modified[i] == RELOAD_WRITE
4145 ? VOIDmode : operand_mode[i]),
4146 (modified[i] == RELOAD_READ
4147 ? VOIDmode : operand_mode[i]),
4148 (insn_code_number < 0 ? 0
4149 : insn_data[insn_code_number].operand[i].strict_low),
4150 1, i, operand_type[i]);
4151 /* If a memory reference remains (either as a MEM or a pseudo that
4152 did not get a hard register), yet we can't make an optional
4153 reload, check if this is actually a pseudo register reference;
4154 we then need to emit a USE and/or a CLOBBER so that reload
4155 inheritance will do the right thing. */
4156 else if (replace
4157 && (MEM_P (operand)
4158 || (REG_P (operand)
4159 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4160 && reg_renumber [REGNO (operand)] < 0)))
4161 {
4162 operand = *recog_data.operand_loc[i];
4163
4164 while (GET_CODE (operand) == SUBREG)
4165 operand = SUBREG_REG (operand);
4166 if (REG_P (operand))
4167 {
4168 if (modified[i] != RELOAD_WRITE)
4169 /* We mark the USE with QImode so that we recognize
4170 it as one that can be safely deleted at the end
4171 of reload. */
4172 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4173 insn), QImode);
4174 if (modified[i] != RELOAD_READ)
4175 emit_insn_after (gen_clobber (operand), insn);
4176 }
4177 }
4178 }
4179 else if (goal_alternative_matches[i] >= 0
4180 && goal_alternative_win[goal_alternative_matches[i]]
4181 && modified[i] == RELOAD_READ
4182 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4183 && ! no_input_reloads && ! no_output_reloads
4184 && optimize)
4185 {
4186 /* Similarly, make an optional reload for a pair of matching
4187 objects that are in MEM or a pseudo that didn't get a hard reg. */
4188
4189 rtx operand = recog_data.operand[i];
4190
4191 while (GET_CODE (operand) == SUBREG)
4192 operand = SUBREG_REG (operand);
4193 if ((MEM_P (operand)
4194 || (REG_P (operand)
4195 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4196 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4197 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4198 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4199 recog_data.operand[i],
4200 recog_data.operand_loc[goal_alternative_matches[i]],
4201 recog_data.operand_loc[i],
4202 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4203 operand_mode[goal_alternative_matches[i]],
4204 operand_mode[i],
4205 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4206 }
4207
4208 /* Perform whatever substitutions on the operands we are supposed
4209 to make due to commutativity or replacement of registers
4210 with equivalent constants or memory slots. */
4211
4212 for (i = 0; i < noperands; i++)
4213 {
4214 /* We only do this on the last pass through reload, because it is
4215 possible for some data (like reg_equiv_address) to be changed during
4216 later passes. Moreover, we lose the opportunity to get a useful
4217 reload_{in,out}_reg when we do these replacements. */
4218
4219 if (replace)
4220 {
4221 rtx substitution = substed_operand[i];
4222
4223 *recog_data.operand_loc[i] = substitution;
4224
4225 /* If we're replacing an operand with a LABEL_REF, we need to
4226 make sure that there's a REG_LABEL_OPERAND note attached to
4227 this instruction. */
4228 if (GET_CODE (substitution) == LABEL_REF
4229 && !find_reg_note (insn, REG_LABEL_OPERAND,
4230 LABEL_REF_LABEL (substitution))
4231 /* For a JUMP_P, if it was a branch target it must have
4232 already been recorded as such. */
4233 && (!JUMP_P (insn)
4234 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4235 insn)))
4236 {
4237 add_reg_note (insn, REG_LABEL_OPERAND,
4238 LABEL_REF_LABEL (substitution));
4239 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4240 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4241 }
4242
4243 }
4244 else
4245 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4246 }
4247
4248 /* If this insn pattern contains any MATCH_DUP's, make sure that
4249 they will be substituted if the operands they match are substituted.
4250 Also do now any substitutions we already did on the operands.
4251
4252 Don't do this if we aren't making replacements because we might be
4253 propagating things allocated by frame pointer elimination into places
4254 it doesn't expect. */
4255
4256 if (insn_code_number >= 0 && replace)
4257 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4258 {
4259 int opno = recog_data.dup_num[i];
4260 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4261 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4262 }
4263
4264 #if 0
4265 /* This loses because reloading of prior insns can invalidate the equivalence
4266 (or at least find_equiv_reg isn't smart enough to find it any more),
4267 causing this insn to need more reload regs than it needed before.
4268 It may be too late to make the reload regs available.
4269 Now this optimization is done safely in choose_reload_regs. */
4270
4271 /* For each reload of a reg into some other class of reg,
4272 search for an existing equivalent reg (same value now) in the right class.
4273 We can use it as long as we don't need to change its contents. */
4274 for (i = 0; i < n_reloads; i++)
4275 if (rld[i].reg_rtx == 0
4276 && rld[i].in != 0
4277 && REG_P (rld[i].in)
4278 && rld[i].out == 0)
4279 {
4280 rld[i].reg_rtx
4281 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4282 static_reload_reg_p, 0, rld[i].inmode);
4283 /* Prevent generation of insn to load the value
4284 because the one we found already has the value. */
4285 if (rld[i].reg_rtx)
4286 rld[i].in = rld[i].reg_rtx;
4287 }
4288 #endif
4289
4290 /* If we detected error and replaced asm instruction by USE, forget about the
4291 reloads. */
4292 if (GET_CODE (PATTERN (insn)) == USE
4293 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4294 n_reloads = 0;
4295
4296 /* Perhaps an output reload can be combined with another
4297 to reduce needs by one. */
4298 if (!goal_earlyclobber)
4299 combine_reloads ();
4300
4301 /* If we have a pair of reloads for parts of an address, they are reloading
4302 the same object, the operands themselves were not reloaded, and they
4303 are for two operands that are supposed to match, merge the reloads and
4304 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4305
4306 for (i = 0; i < n_reloads; i++)
4307 {
4308 int k;
4309
4310 for (j = i + 1; j < n_reloads; j++)
4311 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4312 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4314 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4315 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4316 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4317 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4318 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4319 && rtx_equal_p (rld[i].in, rld[j].in)
4320 && (operand_reloadnum[rld[i].opnum] < 0
4321 || rld[operand_reloadnum[rld[i].opnum]].optional)
4322 && (operand_reloadnum[rld[j].opnum] < 0
4323 || rld[operand_reloadnum[rld[j].opnum]].optional)
4324 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4325 || (goal_alternative_matches[rld[j].opnum]
4326 == rld[i].opnum)))
4327 {
4328 for (k = 0; k < n_replacements; k++)
4329 if (replacements[k].what == j)
4330 replacements[k].what = i;
4331
4332 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4333 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4334 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4335 else
4336 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4337 rld[j].in = 0;
4338 }
4339 }
4340
4341 /* Scan all the reloads and update their type.
4342 If a reload is for the address of an operand and we didn't reload
4343 that operand, change the type. Similarly, change the operand number
4344 of a reload when two operands match. If a reload is optional, treat it
4345 as though the operand isn't reloaded.
4346
4347 ??? This latter case is somewhat odd because if we do the optional
4348 reload, it means the object is hanging around. Thus we need only
4349 do the address reload if the optional reload was NOT done.
4350
4351 Change secondary reloads to be the address type of their operand, not
4352 the normal type.
4353
4354 If an operand's reload is now RELOAD_OTHER, change any
4355 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4356 RELOAD_FOR_OTHER_ADDRESS. */
4357
4358 for (i = 0; i < n_reloads; i++)
4359 {
4360 if (rld[i].secondary_p
4361 && rld[i].when_needed == operand_type[rld[i].opnum])
4362 rld[i].when_needed = address_type[rld[i].opnum];
4363
4364 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4365 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4367 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4368 && (operand_reloadnum[rld[i].opnum] < 0
4369 || rld[operand_reloadnum[rld[i].opnum]].optional))
4370 {
4371 /* If we have a secondary reload to go along with this reload,
4372 change its type to RELOAD_FOR_OPADDR_ADDR. */
4373
4374 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4375 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4376 && rld[i].secondary_in_reload != -1)
4377 {
4378 int secondary_in_reload = rld[i].secondary_in_reload;
4379
4380 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381
4382 /* If there's a tertiary reload we have to change it also. */
4383 if (secondary_in_reload > 0
4384 && rld[secondary_in_reload].secondary_in_reload != -1)
4385 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4386 = RELOAD_FOR_OPADDR_ADDR;
4387 }
4388
4389 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4390 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4391 && rld[i].secondary_out_reload != -1)
4392 {
4393 int secondary_out_reload = rld[i].secondary_out_reload;
4394
4395 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4396
4397 /* If there's a tertiary reload we have to change it also. */
4398 if (secondary_out_reload
4399 && rld[secondary_out_reload].secondary_out_reload != -1)
4400 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4401 = RELOAD_FOR_OPADDR_ADDR;
4402 }
4403
4404 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4405 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4406 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4407 else
4408 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4409 }
4410
4411 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4412 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4413 && operand_reloadnum[rld[i].opnum] >= 0
4414 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4415 == RELOAD_OTHER))
4416 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4417
4418 if (goal_alternative_matches[rld[i].opnum] >= 0)
4419 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4420 }
4421
4422 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4423 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4424 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4425
4426 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4427 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4428 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4429 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4430 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4431 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4432 This is complicated by the fact that a single operand can have more
4433 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4434 choose_reload_regs without affecting code quality, and cases that
4435 actually fail are extremely rare, so it turns out to be better to fix
4436 the problem here by not generating cases that choose_reload_regs will
4437 fail for. */
4438 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4439 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4440 a single operand.
4441 We can reduce the register pressure by exploiting that a
4442 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4443 does not conflict with any of them, if it is only used for the first of
4444 the RELOAD_FOR_X_ADDRESS reloads. */
4445 {
4446 int first_op_addr_num = -2;
4447 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4448 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4449 int need_change = 0;
4450 /* We use last_op_addr_reload and the contents of the above arrays
4451 first as flags - -2 means no instance encountered, -1 means exactly
4452 one instance encountered.
4453 If more than one instance has been encountered, we store the reload
4454 number of the first reload of the kind in question; reload numbers
4455 are known to be non-negative. */
4456 for (i = 0; i < noperands; i++)
4457 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4458 for (i = n_reloads - 1; i >= 0; i--)
4459 {
4460 switch (rld[i].when_needed)
4461 {
4462 case RELOAD_FOR_OPERAND_ADDRESS:
4463 if (++first_op_addr_num >= 0)
4464 {
4465 first_op_addr_num = i;
4466 need_change = 1;
4467 }
4468 break;
4469 case RELOAD_FOR_INPUT_ADDRESS:
4470 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4471 {
4472 first_inpaddr_num[rld[i].opnum] = i;
4473 need_change = 1;
4474 }
4475 break;
4476 case RELOAD_FOR_OUTPUT_ADDRESS:
4477 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4478 {
4479 first_outpaddr_num[rld[i].opnum] = i;
4480 need_change = 1;
4481 }
4482 break;
4483 default:
4484 break;
4485 }
4486 }
4487
4488 if (need_change)
4489 {
4490 for (i = 0; i < n_reloads; i++)
4491 {
4492 int first_num;
4493 enum reload_type type;
4494
4495 switch (rld[i].when_needed)
4496 {
4497 case RELOAD_FOR_OPADDR_ADDR:
4498 first_num = first_op_addr_num;
4499 type = RELOAD_FOR_OPERAND_ADDRESS;
4500 break;
4501 case RELOAD_FOR_INPADDR_ADDRESS:
4502 first_num = first_inpaddr_num[rld[i].opnum];
4503 type = RELOAD_FOR_INPUT_ADDRESS;
4504 break;
4505 case RELOAD_FOR_OUTADDR_ADDRESS:
4506 first_num = first_outpaddr_num[rld[i].opnum];
4507 type = RELOAD_FOR_OUTPUT_ADDRESS;
4508 break;
4509 default:
4510 continue;
4511 }
4512 if (first_num < 0)
4513 continue;
4514 else if (i > first_num)
4515 rld[i].when_needed = type;
4516 else
4517 {
4518 /* Check if the only TYPE reload that uses reload I is
4519 reload FIRST_NUM. */
4520 for (j = n_reloads - 1; j > first_num; j--)
4521 {
4522 if (rld[j].when_needed == type
4523 && (rld[i].secondary_p
4524 ? rld[j].secondary_in_reload == i
4525 : reg_mentioned_p (rld[i].in, rld[j].in)))
4526 {
4527 rld[i].when_needed = type;
4528 break;
4529 }
4530 }
4531 }
4532 }
4533 }
4534 }
4535
4536 /* See if we have any reloads that are now allowed to be merged
4537 because we've changed when the reload is needed to
4538 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4539 check for the most common cases. */
4540
4541 for (i = 0; i < n_reloads; i++)
4542 if (rld[i].in != 0 && rld[i].out == 0
4543 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4544 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4545 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4546 for (j = 0; j < n_reloads; j++)
4547 if (i != j && rld[j].in != 0 && rld[j].out == 0
4548 && rld[j].when_needed == rld[i].when_needed
4549 && MATCHES (rld[i].in, rld[j].in)
4550 && rld[i].rclass == rld[j].rclass
4551 && !rld[i].nocombine && !rld[j].nocombine
4552 && rld[i].reg_rtx == rld[j].reg_rtx)
4553 {
4554 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4555 transfer_replacements (i, j);
4556 rld[j].in = 0;
4557 }
4558
4559 #ifdef HAVE_cc0
4560 /* If we made any reloads for addresses, see if they violate a
4561 "no input reloads" requirement for this insn. But loads that we
4562 do after the insn (such as for output addresses) are fine. */
4563 if (no_input_reloads)
4564 for (i = 0; i < n_reloads; i++)
4565 gcc_assert (rld[i].in == 0
4566 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4567 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4568 #endif
4569
4570 /* Compute reload_mode and reload_nregs. */
4571 for (i = 0; i < n_reloads; i++)
4572 {
4573 rld[i].mode
4574 = (rld[i].inmode == VOIDmode
4575 || (GET_MODE_SIZE (rld[i].outmode)
4576 > GET_MODE_SIZE (rld[i].inmode)))
4577 ? rld[i].outmode : rld[i].inmode;
4578
4579 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4580 }
4581
4582 /* Special case a simple move with an input reload and a
4583 destination of a hard reg, if the hard reg is ok, use it. */
4584 for (i = 0; i < n_reloads; i++)
4585 if (rld[i].when_needed == RELOAD_FOR_INPUT
4586 && GET_CODE (PATTERN (insn)) == SET
4587 && REG_P (SET_DEST (PATTERN (insn)))
4588 && (SET_SRC (PATTERN (insn)) == rld[i].in
4589 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4590 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4591 {
4592 rtx dest = SET_DEST (PATTERN (insn));
4593 unsigned int regno = REGNO (dest);
4594
4595 if (regno < FIRST_PSEUDO_REGISTER
4596 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4597 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4598 {
4599 int nr = hard_regno_nregs[regno][rld[i].mode];
4600 int ok = 1, nri;
4601
4602 for (nri = 1; nri < nr; nri ++)
4603 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4604 {
4605 ok = 0;
4606 break;
4607 }
4608
4609 if (ok)
4610 rld[i].reg_rtx = dest;
4611 }
4612 }
4613
4614 return retval;
4615 }
4616
4617 /* Return true if alternative number ALTNUM in constraint-string
4618 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4619 MEM gives the reference if it didn't need any reloads, otherwise it
4620 is null. */
4621
4622 static bool
4623 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4624 const char *constraint, int altnum)
4625 {
4626 int c;
4627
4628 /* Skip alternatives before the one requested. */
4629 while (altnum > 0)
4630 {
4631 while (*constraint++ != ',')
4632 ;
4633 altnum--;
4634 }
4635 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4636 If one of them is present, this alternative accepts the result of
4637 passing a constant-pool reference through find_reloads_toplev.
4638
4639 The same is true of extra memory constraints if the address
4640 was reloaded into a register. However, the target may elect
4641 to disallow the original constant address, forcing it to be
4642 reloaded into a register instead. */
4643 for (; (c = *constraint) && c != ',' && c != '#';
4644 constraint += CONSTRAINT_LEN (c, constraint))
4645 {
4646 enum constraint_num cn = lookup_constraint (constraint);
4647 if (insn_extra_memory_constraint (cn)
4648 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4649 return true;
4650 }
4651 return false;
4652 }
4653 \f
/* Scan X for memory references and scan the addresses for reloading.
   Also checks for references to "constant" regs that we want to eliminate
   and replaces them with the values they stand for.
   We may alter X destructively if it contains a reference to such.
   If X is just a constant reg, we return the equivalent value
   instead of X.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.

   OPNUM and TYPE identify the purpose of the reload.

   IS_SET_DEST is true if X is the destination of a SET, which is not
   appropriate to be replaced by a constant.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   ADDRESS_RELOADED.  If nonzero, is a pointer to where we put the
   result of find_reloads_address.  */

static rtx
find_reloads_toplev (rtx x, int opnum, enum reload_type type,
		     int ind_levels, int is_set_dest, rtx_insn *insn,
		     int *address_reloaded)
{
  RTX_CODE code = GET_CODE (x);

  const char *fmt = GET_RTX_FORMAT (code);
  int i;
  int copied;

  if (code == REG)
    {
      /* This code is duplicated for speed in find_reloads.  */
      int regno = REGNO (x);
      /* A pseudo equivalent to a constant can simply be replaced by
	 that constant, unless it is being written to.  */
      if (reg_equiv_constant (regno) != 0 && !is_set_dest)
	x = reg_equiv_constant (regno);
#if 0
      /* This creates (subreg (mem...)) which would cause an unnecessary
	 reload of the mem.  */
      else if (reg_equiv_mem (regno) != 0)
	x = reg_equiv_mem (regno);
#endif
      else if (reg_equiv_memory_loc (regno)
	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	{
	  rtx mem = make_memloc (x, regno);
	  if (reg_equiv_address (regno)
	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
	    {
	      /* If this is not a toplevel operand, find_reloads doesn't see
		 this substitution.  We have to emit a USE of the pseudo so
		 that delete_output_reload can see it.  */
	      if (replace_reloads && recog_data.operand[opnum] != x)
		/* We mark the USE with QImode so that we recognize it
		   as one that can be safely deleted at the end of
		   reload.  */
		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
			  QImode);
	      x = mem;
	      /* Reload the address inside the replacement MEM.  */
	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
					opnum, type, ind_levels, insn);
	      /* Remember alternate memory locations found for REGNO so
		 later passes can recognize them as equivalent.  */
	      if (!rtx_equal_p (x, mem))
		push_reg_equiv_alt_mem (regno, x);
	      if (address_reloaded)
		*address_reloaded = i;
	    }
	}
      return x;
    }
  if (code == MEM)
    {
      rtx tem = x;

      /* For a plain MEM, just reload its address; the MEM itself may be
	 replaced (copied) by find_reloads_address via &tem.  */
      i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
				opnum, type, ind_levels, insn);
      if (address_reloaded)
	*address_reloaded = i;

      return tem;
    }

  if (code == SUBREG && REG_P (SUBREG_REG (x)))
    {
      /* Check for SUBREG containing a REG that's equivalent to a
	 constant.  If the constant has a known value, truncate it
	 right now.  Similarly if we are extracting a single-word of a
	 multi-word constant.  If the constant is symbolic, allow it
	 to be substituted normally.  push_reload will strip the
	 subreg later.  The constant must not be VOIDmode, because we
	 will lose the mode of the register (this should never happen
	 because one of the cases above should handle it).  */

      int regno = REGNO (SUBREG_REG (x));
      rtx tem;

      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[regno] < 0
	  && reg_equiv_constant (regno) != 0)
	{
	  tem =
	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
	  gcc_assert (tem);
	  /* If the simplified constant is not legitimate for this mode,
	     force it into the constant pool and reload its address.  */
	  if (CONSTANT_P (tem)
	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
	    {
	      tem = force_const_mem (GET_MODE (x), tem);
	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum, type,
					ind_levels, insn);
	      if (address_reloaded)
		*address_reloaded = i;
	    }
	  return tem;
	}

      /* If the subreg contains a reg that will be converted to a mem,
	 attempt to convert the whole subreg to a (narrower or wider)
	 memory reference instead.  If this succeeds, we're done --
	 otherwise fall through to check whether the inner reg still
	 needs address reloads anyway.  */

      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_memory_loc (regno) != 0)
	{
	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
					     insn, address_reloaded);
	  if (tem)
	    return tem;
	}
    }

  /* Recurse on all rtx sub-expressions of X.  */
  for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
					      ind_levels, is_set_dest, insn,
					      address_reloaded);
	  /* If we have replaced a reg with it's equivalent memory loc -
	     that can still be handled here e.g. if it's in a paradoxical
	     subreg - we must make the change in a copy, rather than using
	     a destructive change.  This way, find_reloads can still elect
	     not to do the change.  */
	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_part;
	}
    }
  return x;
}
4811
4812 /* Return a mem ref for the memory equivalent of reg REGNO.
4813 This mem ref is not shared with anything. */
4814
4815 static rtx
4816 make_memloc (rtx ad, int regno)
4817 {
4818 /* We must rerun eliminate_regs, in case the elimination
4819 offsets have changed. */
4820 rtx tem
4821 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4822 0);
4823
4824 /* If TEM might contain a pseudo, we must copy it to avoid
4825 modifying it when we do the substitution for the reload. */
4826 if (rtx_varies_p (tem, 0))
4827 tem = copy_rtx (tem);
4828
4829 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4830 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4831
4832 /* Copy the result if it's still the same as the equivalence, to avoid
4833 modifying it when we do the substitution for the reload. */
4834 if (tem == reg_equiv_memory_loc (regno))
4835 tem = copy_rtx (tem);
4836 return tem;
4837 }
4838
4839 /* Returns true if AD could be turned into a valid memory reference
4840 to mode MODE in address space AS by reloading the part pointed to
4841 by PART into a register. */
4842
4843 static int
4844 maybe_memory_address_addr_space_p (enum machine_mode mode, rtx ad,
4845 addr_space_t as, rtx *part)
4846 {
4847 int retv;
4848 rtx tem = *part;
4849 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4850
4851 *part = reg;
4852 retv = memory_address_addr_space_p (mode, ad, as);
4853 *part = tem;
4854
4855 return retv;
4856 }
4857
/* Record all reloads needed for handling memory address AD
   which appears in *LOC in a memory reference to mode MODE
   which itself is found in location *MEMREFLOC.
   Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.

   OPNUM and TYPE specify the purpose of this reload.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   Value is one if this address is reloaded or replaced as a whole; it is
   zero if the top level of this address was not reloaded or replaced, and
   it is -1 if it may or may not have been reloaded or replaced.

   Note that there is no verification that the address will be valid after
   this routine does its work.  Instead, we rely on the fact that the address
   was valid when reload started.  So we need only undo things that reload
   could have broken.  These are wrong register types, pseudos not allocated
   to a hard register, and frame pointer elimination.  */

static int
find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
		      rtx *loc, int opnum, enum reload_type type,
		      int ind_levels, rtx_insn *insn)
{
  addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
			     : ADDR_SPACE_GENERIC;
  int regno;
  int removed_and = 0;
  int op_index;
  rtx tem;

  /* If the address is a register, see if it is a legitimate address and
     reload if not.  We first handle the cases where we need not reload
     or where we must reload in a non-standard way.  */

  if (REG_P (ad))
    {
      regno = REGNO (ad);

      /* A pseudo equivalent to a constant: reload the constant itself
	 into a base register.  */
      if (reg_equiv_constant (regno) != 0)
	{
	  find_reloads_address_part (reg_equiv_constant (regno), loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
	  return 1;
	}

      tem = reg_equiv_memory_loc (regno);
      if (tem != 0)
	{
	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
	    {
	      tem = make_memloc (ad, regno);
	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
							XEXP (tem, 0),
							MEM_ADDR_SPACE (tem)))
		{
		  rtx orig = tem;

		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum,
					ADDR_TYPE (type), ind_levels, insn);
		  if (!rtx_equal_p (tem, orig))
		    push_reg_equiv_alt_mem (regno, tem);
		}
	      /* We can avoid a reload if the register's equivalent memory
		 expression is valid as an indirect memory address.
		 But not all addresses are valid in a mem used as an indirect
		 address: only reg or reg+constant.  */

	      if (ind_levels > 0
		  && strict_memory_address_addr_space_p (mode, tem, as)
		  && (REG_P (XEXP (tem, 0))
		      || (GET_CODE (XEXP (tem, 0)) == PLUS
			  && REG_P (XEXP (XEXP (tem, 0), 0))
			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
		{
		  /* TEM is not the same as what we'll be replacing the
		     pseudo with after reload, put a USE in front of INSN
		     in the final reload pass.  */
		  if (replace_reloads
		      && num_not_at_initial_offset
		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		    {
		      *loc = tem;
		      /* We mark the USE with QImode so that we
			 recognize it as one that can be safely
			 deleted at the end of reload.  */
		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
						  insn), QImode);

		      /* This doesn't really count as replacing the address
			 as a whole, since it is still a memory access.  */
		    }
		  return 0;
		}
	      ad = tem;
	    }
	}

      /* The only remaining case where we can avoid a reload is if this is a
	 hard register that is valid as a base register and which is not the
	 subject of a CLOBBER in this insn.  */

      else if (regno < FIRST_PSEUDO_REGISTER
	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
	return 0;

      /* If we do not have one of the cases above, we must do the reload.  */
      push_reload (ad, NULL_RTX, loc, (rtx*) 0,
		   base_reg_class (mode, as, MEM, SCRATCH),
		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
      return 1;
    }

  if (strict_memory_address_addr_space_p (mode, ad, as))
    {
      /* The address appears valid, so reloads are not needed.
	 But the address may contain an eliminable register.
	 This can happen because a machine with indirect addressing
	 may consider a pseudo register by itself a valid address even when
	 it has failed to get a hard reg.
	 So do a tree-walk to find and eliminate all such regs.  */

      /* But first quickly dispose of a common case.  */
      if (GET_CODE (ad) == PLUS
	  && CONST_INT_P (XEXP (ad, 1))
	  && REG_P (XEXP (ad, 0))
	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
	return 0;

      subst_reg_equivs_changed = 0;
      *loc = subst_reg_equivs (ad, insn);

      if (! subst_reg_equivs_changed)
	return 0;

      /* Check result for validity after substitution.  */
      if (strict_memory_address_addr_space_p (mode, ad, as))
	return 0;
    }

#ifdef LEGITIMIZE_RELOAD_ADDRESS
  do
    {
      /* Give the target a chance to legitimize the address itself; the
	 macro jumps to `win' on success.  */
      if (memrefloc && ADDR_SPACE_GENERIC_P (as))
	{
	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
				     ind_levels, win);
	}
      break;
    win:
      *memrefloc = copy_rtx (*memrefloc);
      XEXP (*memrefloc, 0) = ad;
      move_replacements (&ad, &XEXP (*memrefloc, 0));
      return -1;
    }
  while (0);
#endif

  /* The address is not valid.  We have to figure out why.  First see if
     we have an outer AND and remove it if so.  Then analyze what's inside.  */

  if (GET_CODE (ad) == AND)
    {
      removed_and = 1;
      loc = &XEXP (ad, 0);
      ad = *loc;
    }

  /* One possibility for why the address is invalid is that it is itself
     a MEM.  This can happen when the frame pointer is being eliminated, a
     pseudo is not allocated to a hard register, and the offset between the
     frame and stack pointers is not its initial value.  In that case the
     pseudo will have been replaced by a MEM referring to the
     stack pointer.  */
  if (MEM_P (ad))
    {
      /* First ensure that the address in this MEM is valid.  Then, unless
	 indirect addresses are valid, reload the MEM into a register.  */
      tem = ad;
      find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
			    opnum, ADDR_TYPE (type),
			    ind_levels == 0 ? 0 : ind_levels - 1, insn);

      /* If tem was changed, then we must create a new memory reference to
	 hold it and store it back into memrefloc.  */
      if (tem != ad && memrefloc)
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  copy_replacements (tem, XEXP (*memrefloc, 0));
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      /* Check similar cases as for indirect addresses as above except
	 that we can allow pseudos and a MEM since they should have been
	 taken care of above.  */

      if (ind_levels == 0
	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
	  || MEM_P (XEXP (tem, 0))
	  || ! (REG_P (XEXP (tem, 0))
		|| (GET_CODE (XEXP (tem, 0)) == PLUS
		    && REG_P (XEXP (XEXP (tem, 0), 0))
		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
	{
	  /* Must use TEM here, not AD, since it is the one that will
	     have any subexpressions reloaded, if needed.  */
	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
		       VOIDmode, 0,
		       0, opnum, type);
	  return ! removed_and;
	}
      else
	return 0;
    }

  /* If we have address of a stack slot but it's not valid because the
     displacement is too large, compute the sum in a register.
     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SH) we can also get too large displacements from
     big-endian corrections.  */
  else if (GET_CODE (ad) == PLUS
	   && REG_P (XEXP (ad, 0))
	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
	   && CONST_INT_P (XEXP (ad, 1))
	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
				    CONST_INT)
	       /* Similarly, if we were to reload the base register and the
		  mem+offset address is still invalid, then we want to reload
		  the whole address, not just the base register.  */
	       || ! maybe_memory_address_addr_space_p
		     (mode, ad, as, &(XEXP (ad, 0)))))

    {
      /* Unshare the MEM rtx so we can safely alter it.  */
      if (memrefloc)
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      if (double_reg_address_ok
	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
				  PLUS, CONST_INT))
	{
	  /* Unshare the sum as well.  */
	  *loc = ad = copy_rtx (ad);

	  /* Reload the displacement into an index reg.
	     We assume the frame pointer or arg pointer is a base reg.  */
	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
				     type, ind_levels);
	  return 0;
	}
      else
	{
	  /* If the sum of two regs is not necessarily valid,
	     reload the sum into a base reg.
	     That will at least work.  */
	  find_reloads_address_part (ad, loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
	}
      return ! removed_and;
    }

  /* If we have an indexed stack slot, there are three possible reasons why
     it might be invalid: The index might need to be reloaded, the address
     might have been made by frame pointer elimination and hence have a
     constant out of range, or both reasons might apply.

     We can easily check for an index needing reload, but even if that is the
     case, we might also have an invalid constant.  To avoid making the
     conservative assumption and requiring two reloads, we see if this address
     is valid when not interpreted strictly.  If it is, the only problem is
     that the index needs a reload and find_reloads_address_1 will take care
     of it.

     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SPARC) we can also get invalid addresses from preventive
     subreg big-endian corrections made by find_reloads_toplev.  We
     can also get expressions involving LO_SUM (rather than PLUS) from
     find_reloads_subreg_address.

     If we decide to do something, it must be that `double_reg_address_ok'
     is true.  We generate a reload of the base register + constant and
     rework the sum so that the reload register will be added to the index.
     This is safe because we know the address isn't shared.

     We check for the base register as both the first and second operand of
     the innermost PLUS and/or LO_SUM.  */

  for (op_index = 0; op_index < 2; ++op_index)
    {
      rtx operand, addend;
      enum rtx_code inner_code;

      if (GET_CODE (ad) != PLUS)
	continue;

      inner_code = GET_CODE (XEXP (ad, 0));
      if (!(GET_CODE (ad) == PLUS
	    && CONST_INT_P (XEXP (ad, 1))
	    && (inner_code == PLUS || inner_code == LO_SUM)))
	continue;

      operand = XEXP (XEXP (ad, 0), op_index);
      if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
	continue;

      addend = XEXP (XEXP (ad, 0), 1 - op_index);

      if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
				GET_CODE (addend))
	   || operand == frame_pointer_rtx
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	   || operand == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	   || operand == arg_pointer_rtx
#endif
	   || operand == stack_pointer_rtx)
	  && ! maybe_memory_address_addr_space_p
		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
	{
	  rtx offset_reg;
	  enum reg_class cls;

	  offset_reg = plus_constant (GET_MODE (ad), operand,
				      INTVAL (XEXP (ad, 1)));

	  /* Form the adjusted address.  */
	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
	    ad = gen_rtx_PLUS (GET_MODE (ad),
			       op_index == 0 ? offset_reg : addend,
			       op_index == 0 ? addend : offset_reg);
	  else
	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
				 op_index == 0 ? offset_reg : addend,
				 op_index == 0 ? addend : offset_reg);
	  *loc = ad;

	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
	  find_reloads_address_part (XEXP (ad, op_index),
				     &XEXP (ad, op_index), cls,
				     GET_MODE (ad), opnum, type, ind_levels);
	  find_reloads_address_1 (mode, as,
				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
				  GET_CODE (XEXP (ad, op_index)),
				  &XEXP (ad, 1 - op_index), opnum,
				  type, 0, insn);

	  return 0;
	}
    }

  /* See if address becomes valid when an eliminable register
     in a sum is replaced.  */

  tem = ad;
  if (GET_CODE (ad) == PLUS)
    tem = subst_indexed_address (ad);
  if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
    {
      /* Ok, we win that way.  Replace any additional eliminable
	 registers.  */

      subst_reg_equivs_changed = 0;
      tem = subst_reg_equivs (tem, insn);

      /* Make sure that didn't make the address invalid again.  */

      if (! subst_reg_equivs_changed
	  || strict_memory_address_addr_space_p (mode, tem, as))
	{
	  *loc = tem;
	  return 0;
	}
    }

  /* If constants aren't valid addresses, reload the constant address
     into a register.  */
  if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
    {
      enum machine_mode address_mode = GET_MODE (ad);
      if (address_mode == VOIDmode)
	address_mode = targetm.addr_space.address_mode (as);

      /* If AD is an address in the constant pool, the MEM rtx may be shared.
	 Unshare it so we can safely alter it.  */
      if (memrefloc && GET_CODE (ad) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (ad))
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      find_reloads_address_part (ad, loc,
				 base_reg_class (mode, as, MEM, SCRATCH),
				 address_mode, opnum, type, ind_levels);
      return ! removed_and;
    }

  /* Fall back to the general address-analysis routine.  */
  return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
				 opnum, type, ind_levels, insn);
}
5280 \f
/* Find all pseudo regs appearing in AD
   that are eliminable in favor of equivalent values
   and do not have hard regs; replace them by their equivalents.
   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
   front of it for pseudos that we have to replace with stack slots.

   Sets the global subst_reg_equivs_changed when any replacement is
   made; sub-expressions are modified in place.  */

static rtx
subst_reg_equivs (rtx ad, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (ad);
  int i;
  const char *fmt;

  switch (code)
    {
    /* Leaf rtxes that can contain no pseudo register: nothing to do.  */
    case HIGH:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return ad;

    case REG:
      {
	int regno = REGNO (ad);

	if (reg_equiv_constant (regno) != 0)
	  {
	    subst_reg_equivs_changed = 1;
	    return reg_equiv_constant (regno);
	  }
	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
	  {
	    rtx mem = make_memloc (ad, regno);
	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
	      {
		subst_reg_equivs_changed = 1;
		/* We mark the USE with QImode so that we recognize it
		   as one that can be safely deleted at the end of
		   reload.  */
		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
			  QImode);
		return mem;
	      }
	  }
      }
      return ad;

    case PLUS:
      /* Quickly dispose of a common case.  */
      if (XEXP (ad, 0) == frame_pointer_rtx
	  && CONST_INT_P (XEXP (ad, 1)))
	return ad;
      break;

    default:
      break;
    }

  /* Recurse into all rtx sub-expressions, substituting in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
  return ad;
}
5348 \f
/* Compute the sum of X and Y, making canonicalizations assumed in an
   address, namely: sum constant integers, surround the sum of two
   constants with a CONST, put the constant as the second operand, and
   group the constant on the outermost sum.

   This routine assumes both inputs are already in canonical form.  */

rtx
form_sum (enum machine_mode mode, rtx x, rtx y)
{
  rtx tem;

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);

  /* Fold a CONST_INT on either side directly into the other operand.  */
  if (CONST_INT_P (x))
    return plus_constant (mode, y, INTVAL (x));
  else if (CONST_INT_P (y))
    return plus_constant (mode, x, INTVAL (y));
  /* Otherwise ensure any constant ends up in Y.  */
  else if (CONSTANT_P (x))
    tem = x, x = y, y = tem;

  /* Reassociate so the constant part migrates to the outermost sum.  */
  if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
    return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));

  /* Note that if the operands of Y are specified in the opposite
     order in the recursive calls below, infinite recursion will occur.  */
  if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
    return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));

  /* If both constant, encapsulate sum.  Otherwise, just form sum.  A
     constant will have been placed second.  */
  if (CONSTANT_P (x) && CONSTANT_P (y))
    {
      if (GET_CODE (x) == CONST)
	x = XEXP (x, 0);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
    }

  return gen_rtx_PLUS (mode, x, y);
}
5393 \f
5394 /* If ADDR is a sum containing a pseudo register that should be
5395 replaced with a constant (from reg_equiv_constant),
5396 return the result of doing so, and also apply the associative
5397 law so that the result is more likely to be a valid address.
5398 (But it is not guaranteed to be one.)
5399
5400 Note that at most one register is replaced, even if more are
5401 replaceable. Also, we try to put the result into a canonical form
5402 so it is more likely to be a valid address.
5403
5404 In all other cases, return ADDR. */
5405
5406 static rtx
5407 subst_indexed_address (rtx addr)
5408 {
5409 rtx op0 = 0, op1 = 0, op2 = 0;
5410 rtx tem;
5411 int regno;
5412
5413 if (GET_CODE (addr) == PLUS)
5414 {
5415 /* Try to find a register to replace. */
5416 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5417 if (REG_P (op0)
5418 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5419 && reg_renumber[regno] < 0
5420 && reg_equiv_constant (regno) != 0)
5421 op0 = reg_equiv_constant (regno);
5422 else if (REG_P (op1)
5423 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5424 && reg_renumber[regno] < 0
5425 && reg_equiv_constant (regno) != 0)
5426 op1 = reg_equiv_constant (regno);
5427 else if (GET_CODE (op0) == PLUS
5428 && (tem = subst_indexed_address (op0)) != op0)
5429 op0 = tem;
5430 else if (GET_CODE (op1) == PLUS
5431 && (tem = subst_indexed_address (op1)) != op1)
5432 op1 = tem;
5433 else
5434 return addr;
5435
5436 /* Pick out up to three things to add. */
5437 if (GET_CODE (op1) == PLUS)
5438 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5439 else if (GET_CODE (op0) == PLUS)
5440 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5441
5442 /* Compute the sum. */
5443 if (op2 != 0)
5444 op1 = form_sum (GET_MODE (addr), op1, op2);
5445 if (op1 != 0)
5446 op0 = form_sum (GET_MODE (addr), op0, op1);
5447
5448 return op0;
5449 }
5450 return addr;
5451 }
5452 \f
5453 /* Update the REG_INC notes for an insn. It updates all REG_INC
5454 notes for the instruction which refer to REGNO the to refer
5455 to the reload number.
5456
5457 INSN is the insn for which any REG_INC notes need updating.
5458
5459 REGNO is the register number which has been reloaded.
5460
5461 RELOADNUM is the reload number. */
5462
5463 static void
5464 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5465 int reloadnum ATTRIBUTE_UNUSED)
5466 {
5467 #ifdef AUTO_INC_DEC
5468 rtx link;
5469
5470 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5471 if (REG_NOTE_KIND (link) == REG_INC
5472 && (int) REGNO (XEXP (link, 0)) == regno)
5473 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5474 #endif
5475 }
5476 \f
5477 /* Record the pseudo registers we must reload into hard registers in a
5478 subexpression of a would-be memory address, X referring to a value
5479 in mode MODE. (This function is not called if the address we find
5480 is strictly valid.)
5481
5482 CONTEXT = 1 means we are considering regs as index regs,
5483 = 0 means we are considering them as base regs.
5484 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5485 or an autoinc code.
5486 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5487 is the code of the index part of the address. Otherwise, pass SCRATCH
5488 for this argument.
5489 OPNUM and TYPE specify the purpose of any reloads made.
5490
5491 IND_LEVELS says how many levels of indirect addressing are
5492 supported at this point in the address.
5493
5494 INSN, if nonzero, is the insn in which we do the reload. It is used
5495 to determine if we may generate output reloads.
5496
5497 We return nonzero if X, as a whole, is reloaded or replaced. */
5498
5499 /* Note that we take shortcuts assuming that no multi-reg machine mode
5500 occurs as part of an address.
5501 Also, this is not fully machine-customizable; it works for machines
5502 such as VAXen and 68000's and 32000's, but other possible machines
5503 could have addressing modes that this does not handle right.
5504 If you add push_reload calls here, you need to make sure gen_reload
5505 handles those cases gracefully. */
5506
static int
find_reloads_address_1 (enum machine_mode mode, addr_space_t as,
			rtx x, int context,
			enum rtx_code outer_code, enum rtx_code index_code,
			rtx *loc, int opnum, enum reload_type type,
			int ind_levels, rtx_insn *insn)
{
/* Test whether REGNO is usable in its role: as a base register when
   CONTEXT is 0, as an index register when CONTEXT is 1.  */
#define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX)	\
  ((CONTEXT) == 0							\
   ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX)		\
   : REGNO_OK_FOR_INDEX_P (REGNO))

  enum reg_class context_reg_class;
  RTX_CODE code = GET_CODE (x);
  bool reloaded_inner_of_autoinc = false;

  /* Class any reload of X must use, given its base/index role.  */
  if (context == 1)
    context_reg_class = INDEX_REG_CLASS;
  else
    context_reg_class = base_reg_class (mode, as, outer_code, index_code);

  switch (code)
    {
    case PLUS:
      {
	rtx orig_op0 = XEXP (x, 0);
	rtx orig_op1 = XEXP (x, 1);
	RTX_CODE code0 = GET_CODE (orig_op0);
	RTX_CODE code1 = GET_CODE (orig_op1);
	rtx op0 = orig_op0;
	rtx op1 = orig_op1;

	/* If an operand is a SUBREG of a hard register, classify the
	   address using the underlying hard register (adjusted for the
	   subword offset).  */
	if (GET_CODE (op0) == SUBREG)
	  {
	    op0 = SUBREG_REG (op0);
	    code0 = GET_CODE (op0);
	    if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
	      op0 = gen_rtx_REG (word_mode,
				 (REGNO (op0) +
				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
						       GET_MODE (SUBREG_REG (orig_op0)),
						       SUBREG_BYTE (orig_op0),
						       GET_MODE (orig_op0))));
	  }

	if (GET_CODE (op1) == SUBREG)
	  {
	    op1 = SUBREG_REG (op1);
	    code1 = GET_CODE (op1);
	    if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
	      /* ??? Why is this given op1's mode and above for
		 ??? op0 SUBREGs we use word_mode?  */
	      op1 = gen_rtx_REG (GET_MODE (op1),
				 (REGNO (op1) +
				  subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
						       GET_MODE (SUBREG_REG (orig_op1)),
						       SUBREG_BYTE (orig_op1),
						       GET_MODE (orig_op1))));
	  }
	/* Plus in the index register may be created only as a result of
	   register rematerialization for expression like &localvar*4.  Reload it.
	   It may be possible to combine the displacement on the outer level,
	   but it is probably not worthwhile to do so.  */
	if (context == 1)
	  {
	    find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
				  opnum, ADDR_TYPE (type), ind_levels, insn);
	    push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
			 context_reg_class,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	    return 1;
	  }

	/* When one operand must be the index (a MULT or an extension),
	   or the other operand is a MEM, reload each side in the role
	   that assignment forces on it.  */
	if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
	    || code0 == ZERO_EXTEND || code1 == MEM)
	  {
	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				    &XEXP (x, 0), opnum, type, ind_levels,
				    insn);
	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
				    &XEXP (x, 1), opnum, type, ind_levels,
				    insn);
	  }

	else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
		 || code1 == ZERO_EXTEND || code0 == MEM)
	  {
	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
				    &XEXP (x, 0), opnum, type, ind_levels,
				    insn);
	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				    &XEXP (x, 1), opnum, type, ind_levels,
				    insn);
	  }

	/* A constant operand needs no reload itself; process only the
	   other operand, as a base.  */
	else if (code0 == CONST_INT || code0 == CONST
		 || code0 == SYMBOL_REF || code0 == LABEL_REF)
	  find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
				  &XEXP (x, 1), opnum, type, ind_levels,
				  insn);

	else if (code1 == CONST_INT || code1 == CONST
		 || code1 == SYMBOL_REF || code1 == LABEL_REF)
	  find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
				  &XEXP (x, 0), opnum, type, ind_levels,
				  insn);

	/* Both operands are registers: if either assignment of the
	   base/index roles is already valid, leave the address alone;
	   otherwise reload whichever operand blocks a valid
	   assignment.  */
	else if (code0 == REG && code1 == REG)
	  {
	    if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
		&& regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
	      return 0;
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
		     && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
	      return 0;
	    else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
	      find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				      &XEXP (x, 1), opnum, type, ind_levels,
				      insn);
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
	      find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
				      &XEXP (x, 0), opnum, type, ind_levels,
				      insn);
	    else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
	      find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				      &XEXP (x, 0), opnum, type, ind_levels,
				      insn);
	    else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
	      find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
				      &XEXP (x, 1), opnum, type, ind_levels,
				      insn);
	    else
	      {
		find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
					&XEXP (x, 0), opnum, type, ind_levels,
					insn);
		find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
					&XEXP (x, 1), opnum, type, ind_levels,
					insn);
	      }
	  }

	/* Only one operand is a register: treat it as the index and the
	   other operand as the base.  */
	else if (code0 == REG)
	  {
	    find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
				    &XEXP (x, 0), opnum, type, ind_levels,
				    insn);
	    find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
				    &XEXP (x, 1), opnum, type, ind_levels,
				    insn);
	  }

	else if (code1 == REG)
	  {
	    find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
				    &XEXP (x, 1), opnum, type, ind_levels,
				    insn);
	    find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
				    &XEXP (x, 0), opnum, type, ind_levels,
				    insn);
	  }
      }

      return 0;

    case POST_MODIFY:
    case PRE_MODIFY:
      {
	rtx op0 = XEXP (x, 0);
	rtx op1 = XEXP (x, 1);
	enum rtx_code index_code;
	int regno;
	int reloadnum;

	if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
	  return 0;

	/* Currently, we only support {PRE,POST}_MODIFY constructs
	   where a base register is {inc,dec}remented by the contents
	   of another register or by a constant value.  Thus, these
	   operands must match.  */
	gcc_assert (op0 == XEXP (op1, 0));

	/* Require index register (or constant).  Let's just handle the
	   register case in the meantime... If the target allows
	   auto-modify by a constant then we could try replacing a pseudo
	   register with its equivalent constant where applicable.

	   We also handle the case where the register was eliminated
	   resulting in a PLUS subexpression.

	   If we later decide to reload the whole PRE_MODIFY or
	   POST_MODIFY, inc_for_reload might clobber the reload register
	   before reading the index.  The index register might therefore
	   need to live longer than a TYPE reload normally would, so be
	   conservative and class it as RELOAD_OTHER.  */
	if ((REG_P (XEXP (op1, 1))
	     && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
	    || GET_CODE (XEXP (op1, 1)) == PLUS)
	  find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
				  &XEXP (op1, 1), opnum, RELOAD_OTHER,
				  ind_levels, insn);

	gcc_assert (REG_P (XEXP (op1, 0)));

	regno = REGNO (XEXP (op1, 0));
	index_code = GET_CODE (XEXP (op1, 1));

	/* A register that is incremented cannot be constant!  */
	gcc_assert (regno < FIRST_PSEUDO_REGISTER
		    || reg_equiv_constant (regno) == 0);

	/* Handle a register that is equivalent to a memory location
	   which cannot be addressed directly.  */
	if (reg_equiv_memory_loc (regno) != 0
	    && (reg_equiv_address (regno) != 0
		|| num_not_at_initial_offset))
	  {
	    rtx tem = make_memloc (XEXP (x, 0), regno);

	    if (reg_equiv_address (regno)
		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
	      {
		rtx orig = tem;

		/* First reload the memory location's address.
		   We can't use ADDR_TYPE (type) here, because we need to
		   write back the value after reading it, hence we actually
		   need two registers.  */
		find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
				      &XEXP (tem, 0), opnum,
				      RELOAD_OTHER,
				      ind_levels, insn);

		if (!rtx_equal_p (tem, orig))
		  push_reg_equiv_alt_mem (regno, tem);

		/* Then reload the memory location into a base
		   register.  */
		reloadnum = push_reload (tem, tem, &XEXP (x, 0),
					 &XEXP (op1, 0),
					 base_reg_class (mode, as,
							 code, index_code),
					 GET_MODE (x), GET_MODE (x), 0,
					 0, opnum, RELOAD_OTHER);

		update_auto_inc_notes (this_insn, regno, reloadnum);
		return 0;
	      }
	  }

	if (reg_renumber[regno] >= 0)
	  regno = reg_renumber[regno];

	/* We require a base register here...  */
	if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
	  {
	    reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
				     &XEXP (op1, 0), &XEXP (x, 0),
				     base_reg_class (mode, as,
						     code, index_code),
				     GET_MODE (x), GET_MODE (x), 0, 0,
				     opnum, RELOAD_OTHER);

	    update_auto_inc_notes (this_insn, regno, reloadnum);
	    return 0;
	  }
      }
      return 0;

    case POST_INC:
    case POST_DEC:
    case PRE_INC:
    case PRE_DEC:
      if (REG_P (XEXP (x, 0)))
	{
	  int regno = REGNO (XEXP (x, 0));
	  int value = 0;
	  rtx x_orig = x;

	  /* A register that is incremented cannot be constant!  */
	  gcc_assert (regno < FIRST_PSEUDO_REGISTER
		      || reg_equiv_constant (regno) == 0);

	  /* Handle a register that is equivalent to a memory location
	     which cannot be addressed directly.  */
	  if (reg_equiv_memory_loc (regno) != 0
	      && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	    {
	      rtx tem = make_memloc (XEXP (x, 0), regno);
	      if (reg_equiv_address (regno)
		  || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		{
		  rtx orig = tem;

		  /* First reload the memory location's address.
		     We can't use ADDR_TYPE (type) here, because we need to
		     write back the value after reading it, hence we actually
		     need two registers.  */
		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum, type,
					ind_levels, insn);
		  reloaded_inner_of_autoinc = true;
		  if (!rtx_equal_p (tem, orig))
		    push_reg_equiv_alt_mem (regno, tem);
		  /* Put this inside a new increment-expression.  */
		  x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
		  /* Proceed to reload that, as if it contained a register.  */
		}
	    }

	  /* If we have a hard register that is ok in this incdec context,
	     don't make a reload.  If the register isn't nice enough for
	     autoincdec, we can reload it.  But, if an autoincrement of a
	     register that we here verified as playing nice, still outside
	     isn't "valid", it must be that no autoincrement is "valid".
	     If that is true and something made an autoincrement anyway,
	     this must be a special context where one is allowed.
	     (For example, a "push" instruction.)
	     We can't improve this address, so leave it alone.  */

	  /* Otherwise, reload the autoincrement into a suitable hard reg
	     and record how much to increment by.  */

	  if (reg_renumber[regno] >= 0)
	    regno = reg_renumber[regno];
	  if (regno >= FIRST_PSEUDO_REGISTER
	      || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
				      index_code))
	    {
	      int reloadnum;

	      /* If we can output the register afterwards, do so, this
		 saves the extra update.
		 We can do so if we have an INSN - i.e. no JUMP_INSN nor
		 CALL_INSN - and it does not set CC0.
		 But don't do this if we cannot directly address the
		 memory location, since this will make it harder to
		 reuse address reloads, and increases register pressure.
		 Also don't do this if we can probably update x directly.  */
	      rtx equiv = (MEM_P (XEXP (x, 0))
			   ? XEXP (x, 0)
			   : reg_equiv_mem (regno));
	      enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
	      if (insn && NONJUMP_INSN_P (insn) && equiv
		  && memory_operand (equiv, GET_MODE (equiv))
#ifdef HAVE_cc0
		  && ! sets_cc0_p (PATTERN (insn))
#endif
		  && ! (icode != CODE_FOR_nothing
			&& insn_operand_matches (icode, 0, equiv)
			&& insn_operand_matches (icode, 1, equiv))
		  /* Using RELOAD_OTHER means we emit this and the reload we
		     made earlier in the wrong order.  */
		  && !reloaded_inner_of_autoinc)
		{
		  /* We use the original pseudo for loc, so that
		     emit_reload_insns() knows which pseudo this
		     reload refers to and updates the pseudo rtx, not
		     its equivalent memory location, as well as the
		     corresponding entry in reg_last_reload_reg.  */
		  loc = &XEXP (x_orig, 0);
		  x = XEXP (x, 0);
		  reloadnum
		    = push_reload (x, x, loc, loc,
				   context_reg_class,
				   GET_MODE (x), GET_MODE (x), 0, 0,
				   opnum, RELOAD_OTHER);
		}
	      else
		{
		  reloadnum
		    = push_reload (x, x, loc, (rtx*) 0,
				   context_reg_class,
				   GET_MODE (x), GET_MODE (x), 0, 0,
				   opnum, type);
		  rld[reloadnum].inc
		    = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));

		  value = 1;
		}

	      update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
				     reloadnum);
	    }
	  return value;
	}
      return 0;

    case TRUNCATE:
    case SIGN_EXTEND:
    case ZERO_EXTEND:
      /* Look for parts to reload in the inner expression and reload them
	 too, in addition to this operation.  Reloading all inner parts in
	 addition to this one shouldn't be necessary, but at this point,
	 we don't know if we can possibly omit any part that *can* be
	 reloaded.  Targets that are better off reloading just either part
	 (or perhaps even a different part of an outer expression), should
	 define LEGITIMIZE_RELOAD_ADDRESS.  */
      find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
			      context, code, SCRATCH, &XEXP (x, 0), opnum,
			      type, ind_levels, insn);
      push_reload (x, NULL_RTX, loc, (rtx*) 0,
		   context_reg_class,
		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
      return 1;

    case MEM:
      /* This is probably the result of a substitution, by eliminate_regs, of
	 an equivalent address for a pseudo that was not allocated to a hard
	 register.  Verify that the specified address is valid and reload it
	 into a register.

	 Since we know we are going to reload this item, don't decrement for
	 the indirection level.

	 Note that this is actually conservative:  it would be slightly more
	 efficient to use the value of SPILL_INDIRECT_LEVELS from
	 reload1.c here.  */

      find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
			    opnum, ADDR_TYPE (type), ind_levels, insn);
      push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
		   context_reg_class,
		   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
      return 1;

    case REG:
      {
	int regno = REGNO (x);

	/* A pseudo with a constant equivalence: reload the constant
	   itself into the required class.  */
	if (reg_equiv_constant (regno) != 0)
	  {
	    find_reloads_address_part (reg_equiv_constant (regno), loc,
				       context_reg_class,
				       GET_MODE (x), opnum, type, ind_levels);
	    return 1;
	  }

#if 0 /* This might screw code in reload1.c to delete prior output-reload
	 that feeds this insn.  */
	if (reg_equiv_mem (regno) != 0)
	  {
	    push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
			 context_reg_class,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	    return 1;
	  }
#endif

	/* Substitute a freshly computed memory equivalent when the
	   recorded one may be stale (eliminations moved, or the
	   equivalence has its own address).  */
	if (reg_equiv_memory_loc (regno)
	    && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	  {
	    rtx tem = make_memloc (x, regno);
	    if (reg_equiv_address (regno) != 0
		|| ! rtx_equal_p (tem, reg_equiv_mem (regno)))
	      {
		x = tem;
		find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
				      &XEXP (x, 0), opnum, ADDR_TYPE (type),
				      ind_levels, insn);
		if (!rtx_equal_p (x, tem))
		  push_reg_equiv_alt_mem (regno, x);
	      }
	  }

	if (reg_renumber[regno] >= 0)
	  regno = reg_renumber[regno];

	/* Reload X unless it ended up in a hard register that is valid
	   for its base/index role.  */
	if (regno >= FIRST_PSEUDO_REGISTER
	    || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
				    index_code))
	  {
	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
			 context_reg_class,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	    return 1;
	  }

	/* If a register appearing in an address is the subject of a CLOBBER
	   in this insn, reload it into some other register to be safe.
	   The CLOBBER is supposed to make the register unavailable
	   from before this insn to after it.  */
	if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
	  {
	    push_reload (x, NULL_RTX, loc, (rtx*) 0,
			 context_reg_class,
			 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
	    return 1;
	  }
      }
      return 0;

    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
	{
	  /* If this is a SUBREG of a hard register and the resulting register
	     is of the wrong class, reload the whole SUBREG.  This avoids
	     needless copies if SUBREG_REG is multi-word.  */
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	    {
	      int regno ATTRIBUTE_UNUSED = subreg_regno (x);

	      if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
				       index_code))
		{
		  push_reload (x, NULL_RTX, loc, (rtx*) 0,
			       context_reg_class,
			       GET_MODE (x), VOIDmode, 0, 0, opnum, type);
		  return 1;
		}
	    }
	  /* If this is a SUBREG of a pseudo-register, and the pseudo-register
	     is larger than the class size, then reload the whole SUBREG.  */
	  else
	    {
	      enum reg_class rclass = context_reg_class;
	      if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
		  > reg_class_size[(int) rclass])
		{
		  /* If the inner register will be replaced by a memory
		     reference, we can do this only if we can replace the
		     whole subreg by a (narrower) memory reference.  If
		     this is not possible, fall through and reload just
		     the inner register (including address reloads).  */
		  if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
		    {
		      rtx tem = find_reloads_subreg_address (x, opnum,
							     ADDR_TYPE (type),
							     ind_levels, insn,
							     NULL);
		      if (tem)
			{
			  push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
				       GET_MODE (tem), VOIDmode, 0, 0,
				       opnum, type);
			  return 1;
			}
		    }
		  else
		    {
		      push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
				   GET_MODE (x), VOIDmode, 0, 0, opnum, type);
		      return 1;
		    }
		}
	    }
	}
      break;

    default:
      break;
    }

  /* No special handling applied: recurse into all subexpressions.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
	     we get here.  */
	  find_reloads_address_1 (mode, as, XEXP (x, i), context,
				  code, SCRATCH, &XEXP (x, i),
				  opnum, type, ind_levels, insn);
      }
  }

#undef REG_OK_FOR_CONTEXT
  return 0;
}
6079 \f
6080 /* X, which is found at *LOC, is a part of an address that needs to be
6081 reloaded into a register of class RCLASS. If X is a constant, or if
6082 X is a PLUS that contains a constant, check that the constant is a
6083 legitimate operand and that we are supposed to be able to load
6084 it into the register.
6085
6086 If not, force the constant into memory and reload the MEM instead.
6087
6088 MODE is the mode to use, in case X is an integer constant.
6089
6090 OPNUM and TYPE describe the purpose of any reloads made.
6091
6092 IND_LEVELS says how many levels of indirect addressing this machine
6093 supports. */
6094
6095 static void
6096 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6097 enum machine_mode mode, int opnum,
6098 enum reload_type type, int ind_levels)
6099 {
6100 if (CONSTANT_P (x)
6101 && (!targetm.legitimate_constant_p (mode, x)
6102 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6103 {
6104 x = force_const_mem (mode, x);
6105 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6106 opnum, type, ind_levels, 0);
6107 }
6108
6109 else if (GET_CODE (x) == PLUS
6110 && CONSTANT_P (XEXP (x, 1))
6111 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6112 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6113 == NO_REGS))
6114 {
6115 rtx tem;
6116
6117 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6118 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6119 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6120 opnum, type, ind_levels, 0);
6121 }
6122
6123 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6124 mode, VOIDmode, 0, 0, opnum, type);
6125 }
6126 \f
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded, and the pseudo is equivalent to a memory location.

   Attempt to replace the whole subreg by a (possibly narrower or wider)
   memory reference.  If this is possible, return this new memory
   reference, and push all required address reloads.  Otherwise,
   return NULL.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.

   If ADDRESS_RELOADED is nonnull, *ADDRESS_RELOADED is set to nonzero
   when the new memory reference needed an address reload.  */

static rtx
find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
			     int ind_levels, rtx_insn *insn,
			     int *address_reloaded)
{
  enum machine_mode outer_mode = GET_MODE (x);
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  int regno = REGNO (SUBREG_REG (x));
  int reloaded = 0;
  rtx tem, orig;
  int offset;

  gcc_assert (reg_equiv_memory_loc (regno) != 0);

  /* We cannot replace the subreg with a modified memory reference if:

     - we have a paradoxical subreg that implicitly acts as a zero or
       sign extension operation due to LOAD_EXTEND_OP;

     - we have a subreg that is implicitly supposed to act on the full
       register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);

     - the address of the equivalent memory location is mode-dependent;  or

     - we have a paradoxical subreg and the resulting memory is not
       sufficiently aligned to allow access in the wider mode.

    In addition, we choose not to perform the replacement for *any*
    paradoxical subreg, even if it were possible in principle.  This
    is to avoid generating wider memory references than necessary.

    This corresponds to how previous versions of reload used to handle
    paradoxical subregs where no address reload was required.  */

  if (paradoxical_subreg_p (x))
    return NULL;

#ifdef WORD_REGISTER_OPERATIONS
  /* A narrower subreg spanning the same number of words acts on the
     full register (second bullet above); punt on those too.  */
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
      && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
	  == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
    return NULL;
#endif

  /* Since we don't attempt to handle paradoxical subregs, we can just
     call into simplify_subreg, which will handle all remaining checks
     for us.  */
  orig = make_memloc (SUBREG_REG (x), regno);
  offset = SUBREG_BYTE (x);
  tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
  if (!tem || !MEM_P (tem))
    return NULL;

  /* Now push all required address reloads, if any.  */
  reloaded = find_reloads_address (GET_MODE (tem), &tem,
				   XEXP (tem, 0), &XEXP (tem, 0),
				   opnum, type, ind_levels, insn);
  /* ??? Do we need to handle nonzero offsets somehow?  */
  if (!offset && !rtx_equal_p (tem, orig))
    push_reg_equiv_alt_mem (regno, tem);

  /* For some processors an address may be valid in the original mode but
     not in a smaller mode.  For example, ARM accepts a scaled index register
     in SImode but not in HImode.  Note that this is only a problem if the
     address in reg_equiv_mem is already invalid in the new mode; other
     cases would be fixed by find_reloads_address as usual.

     ??? We attempt to handle such cases here by doing an additional reload
     of the full address after the usual processing by find_reloads_address.
     Note that this may not work in the general case, but it seems to cover
     the cases where this situation currently occurs.  A more general fix
     might be to reload the *value* instead of the address, but this would
     not be expected by the callers of this routine as-is.

     If find_reloads_address already completely replaced the address, there
     is nothing further to do.  */
  if (reloaded == 0
      && reg_equiv_mem (regno) != 0
      && !strict_memory_address_addr_space_p
		(GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
		 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
    {
      push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
				   MEM, SCRATCH),
		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
      reloaded = 1;
    }

  /* If this is not a toplevel operand, find_reloads doesn't see this
     substitution.  We have to emit a USE of the pseudo so that
     delete_output_reload can see it.  */
  if (replace_reloads && recog_data.operand[opnum] != x)
    /* We mark the USE with QImode so that we recognize it as one that
       can be safely deleted at the end of reload.  */
    PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
	      QImode);

  if (address_reloaded)
    *address_reloaded = reloaded;

  return tem;
}
6247 \f
/* Substitute into INSN the reload registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.  */

void
subst_reloads (rtx_insn *insn)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;
      if (reloadreg)
	{
#ifdef DEBUG_RELOAD
	  /* This checking takes a very long time on some platforms
	     causing the gcc.c-torture/compile/limits-fnargs.c test
	     to time out during testing.  See PR 31850.

	     Internal consistency test.  Check that we don't modify
	     anything in the equivalence arrays.  Whenever something from
	     those arrays needs to be reloaded, it must be unshared before
	     being substituted into; the equivalence must not be modified.
	     Otherwise, if the equivalence is used after that, it will
	     have been modified, and the thing substituted (probably a
	     register) is likely overwritten and not a usable equivalence.  */
	  int check_regno;

	  for (check_regno = 0; check_regno < max_regno; check_regno++)
	    {
/* Assert that the location being replaced is not shared with this
   pseudo's recorded equivalence of the given kind.  */
#define CHECK_MODF(ARRAY) \
  gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
	      || !loc_mentioned_in_p (r->where, \
				      (*reg_equivs)[check_regno].ARRAY))

	      CHECK_MODF (constant);
	      CHECK_MODF (memory_loc);
	      CHECK_MODF (address);
	      CHECK_MODF (mem);
#undef CHECK_MODF
	    }
#endif /* DEBUG_RELOAD */

	  /* If we're replacing a LABEL_REF with a register, there must
	     already be an indication (to e.g. flow) which label this
	     register refers to.  */
	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
		      || !JUMP_P (insn)
		      || find_reg_note (insn,
					REG_LABEL_OPERAND,
					XEXP (*r->where, 0))
		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));

	  /* Encapsulate RELOADREG so its machine mode matches what
	     used to be there.  Note that gen_lowpart_common will
	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
	     will always be a REG here.  */
	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  /* Perform the actual in-place substitution.  */
	  *r->where = reloadreg;
	}
      /* If reload got no reg and isn't optional, something's wrong.  */
      else
	gcc_assert (rld[r->what].optional);
    }
}
6319 \f
6320 /* Make a copy of any replacements being done into X and move those
6321 copies to locations in Y, a copy of X. */
6322
6323 void
6324 copy_replacements (rtx x, rtx y)
6325 {
6326 copy_replacements_1 (&x, &y, n_replacements);
6327 }
6328
6329 static void
6330 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6331 {
6332 int i, j;
6333 rtx x, y;
6334 struct replacement *r;
6335 enum rtx_code code;
6336 const char *fmt;
6337
6338 for (j = 0; j < orig_replacements; j++)
6339 if (replacements[j].where == px)
6340 {
6341 r = &replacements[n_replacements++];
6342 r->where = py;
6343 r->what = replacements[j].what;
6344 r->mode = replacements[j].mode;
6345 }
6346
6347 x = *px;
6348 y = *py;
6349 code = GET_CODE (x);
6350 fmt = GET_RTX_FORMAT (code);
6351
6352 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6353 {
6354 if (fmt[i] == 'e')
6355 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6356 else if (fmt[i] == 'E')
6357 for (j = XVECLEN (x, i); --j >= 0; )
6358 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6359 orig_replacements);
6360 }
6361 }
6362
6363 /* Change any replacements being done to *X to be done to *Y. */
6364
6365 void
6366 move_replacements (rtx *x, rtx *y)
6367 {
6368 int i;
6369
6370 for (i = 0; i < n_replacements; i++)
6371 if (replacements[i].where == x)
6372 replacements[i].where = y;
6373 }
6374 \f
6375 /* If LOC was scheduled to be replaced by something, return the replacement.
6376 Otherwise, return *LOC. */
6377
6378 rtx
6379 find_replacement (rtx *loc)
6380 {
6381 struct replacement *r;
6382
6383 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6384 {
6385 rtx reloadreg = rld[r->what].reg_rtx;
6386
6387 if (reloadreg && r->where == loc)
6388 {
6389 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6390 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6391
6392 return reloadreg;
6393 }
6394 else if (reloadreg && GET_CODE (*loc) == SUBREG
6395 && r->where == &SUBREG_REG (*loc))
6396 {
6397 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6398 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6399
6400 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6401 GET_MODE (SUBREG_REG (*loc)),
6402 SUBREG_BYTE (*loc));
6403 }
6404 }
6405
6406 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6407 what's inside and make a new rtl if so. */
6408 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6409 || GET_CODE (*loc) == MULT)
6410 {
6411 rtx x = find_replacement (&XEXP (*loc, 0));
6412 rtx y = find_replacement (&XEXP (*loc, 1));
6413
6414 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6415 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6416 }
6417
6418 return *loc;
6419 }
6420 \f
6421 /* Return nonzero if register in range [REGNO, ENDREGNO)
6422 appears either explicitly or implicitly in X
6423 other than being stored into (except for earlyclobber operands).
6424
6425 References contained within the substructure at LOC do not count.
6426 LOC may be zero, meaning don't ignore anything.
6427
6428 This is similar to refers_to_regno_p in rtlanal.c except that we
6429 look at equivalences for pseudos that didn't get hard registers. */
6430
6431 static int
6432 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6433 rtx x, rtx *loc)
6434 {
6435 int i;
6436 unsigned int r;
6437 RTX_CODE code;
6438 const char *fmt;
6439
6440 if (x == 0)
6441 return 0;
6442
/* Loop here rather than recursing whenever only X changes, to bound
   the recursion depth on deeply nested expressions.  */
6443 repeat:
6444 code = GET_CODE (x);
6445
6446 switch (code)
6447 {
6448 case REG:
6449 r = REGNO (x);
6450
6451 /* If this is a pseudo, a hard register must not have been allocated.
6452 X must therefore either be a constant or be in memory. */
6453 if (r >= FIRST_PSEUDO_REGISTER)
6454 {
6455 if (reg_equiv_memory_loc (r))
6456 return refers_to_regno_for_reload_p (regno, endregno,
6457 reg_equiv_memory_loc (r),
6458 (rtx*) 0);
6459
6460 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6461 return 0;
6462 }
6463
/* Hard register: test for overlap with [REGNO, ENDREGNO).  */
6464 return (endregno > r
6465 && regno < r + (r < FIRST_PSEUDO_REGISTER
6466 ? hard_regno_nregs[r][GET_MODE (x)]
6467 : 1));
6468
6469 case SUBREG:
6470 /* If this is a SUBREG of a hard reg, we can see exactly which
6471 registers are being modified. Otherwise, handle normally. */
6472 if (REG_P (SUBREG_REG (x))
6473 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6474 {
6475 unsigned int inner_regno = subreg_regno (x);
6476 unsigned int inner_endregno
6477 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6478 ? subreg_nregs (x) : 1);
6479
6480 return endregno > inner_regno && regno < inner_endregno;
6481 }
6482 break;
6483
6484 case CLOBBER:
6485 case SET:
6486 if (&SET_DEST (x) != loc
6487 /* Note setting a SUBREG counts as referring to the REG it is in for
6488 a pseudo but not for hard registers since we can
6489 treat each word individually. */
6490 && ((GET_CODE (SET_DEST (x)) == SUBREG
6491 && loc != &SUBREG_REG (SET_DEST (x))
6492 && REG_P (SUBREG_REG (SET_DEST (x)))
6493 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6494 && refers_to_regno_for_reload_p (regno, endregno,
6495 SUBREG_REG (SET_DEST (x)),
6496 loc))
6497 /* If the output is an earlyclobber operand, this is
6498 a conflict. */
6499 || ((!REG_P (SET_DEST (x))
6500 || earlyclobber_operand_p (SET_DEST (x)))
6501 && refers_to_regno_for_reload_p (regno, endregno,
6502 SET_DEST (x), loc))))
6503 return 1;
6504
/* For a SET, continue scanning in the source; a CLOBBER has no source.  */
6505 if (code == CLOBBER || loc == &SET_SRC (x))
6506 return 0;
6507 x = SET_SRC (x);
6508 goto repeat;
6509
6510 default:
6511 break;
6512 }
6513
6514 /* X does not match, so try its subexpressions. */
6515
6516 fmt = GET_RTX_FORMAT (code);
6517 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6518 {
6519 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6520 {
/* Iterate instead of recursing on operand 0 (tail-call elimination).  */
6521 if (i == 0)
6522 {
6523 x = XEXP (x, 0);
6524 goto repeat;
6525 }
6526 else
6527 if (refers_to_regno_for_reload_p (regno, endregno,
6528 XEXP (x, i), loc))
6529 return 1;
6530 }
6531 else if (fmt[i] == 'E')
6532 {
6533 int j;
6534 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6535 if (loc != &XVECEXP (x, i, j)
6536 && refers_to_regno_for_reload_p (regno, endregno,
6537 XVECEXP (x, i, j), loc))
6538 return 1;
6539 }
6540 }
6541 return 0;
6542 }
6543
6544 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6545 we check if any register number in X conflicts with the relevant register
6546 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6547 contains a MEM (we don't bother checking for memory addresses that can't
6548 conflict because we expect this to be a rare case).
6549
6550 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6551 that we look at equivalences for pseudos that didn't get hard registers. */
6552
6553 int
6554 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6555 {
6556 int regno, endregno;
6557
6558 /* Overly conservative. */
6559 if (GET_CODE (x) == STRICT_LOW_PART
6560 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6561 x = XEXP (x, 0);
6562
6563 /* If either argument is a constant, then modifying X can not affect IN. */
6564 if (CONSTANT_P (x) || CONSTANT_P (in))
6565 return 0;
6566 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6567 return refers_to_mem_for_reload_p (in);
6568 else if (GET_CODE (x) == SUBREG)
6569 {
6570 regno = REGNO (SUBREG_REG (x));
/* For a hard-reg SUBREG, narrow to the actual registers covered.  */
6571 if (regno < FIRST_PSEUDO_REGISTER)
6572 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6573 GET_MODE (SUBREG_REG (x)),
6574 SUBREG_BYTE (x),
6575 GET_MODE (x));
6576 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6577 ? subreg_nregs (x) : 1);
6578
6579 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6580 }
6581 else if (REG_P (x))
6582 {
6583 regno = REGNO (x);
6584
6585 /* If this is a pseudo, it must not have been assigned a hard register.
6586 Therefore, it must either be in memory or be a constant. */
6587
6588 if (regno >= FIRST_PSEUDO_REGISTER)
6589 {
6590 if (reg_equiv_memory_loc (regno))
6591 return refers_to_mem_for_reload_p (in);
6592 gcc_assert (reg_equiv_constant (regno));
6593 return 0;
6594 }
6595
6596 endregno = END_HARD_REGNO (x);
6597
6598 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6599 }
6600 else if (MEM_P (x))
6601 return refers_to_mem_for_reload_p (in);
6602 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6603 || GET_CODE (x) == CC0)
6604 return reg_mentioned_p (x, in);
6605 else
6606 {
6607 gcc_assert (GET_CODE (x) == PLUS);
6608
6609 /* We actually want to know if X is mentioned somewhere inside IN.
6610 We must not say that (plus (sp) (const_int 124)) is in
6611 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6612 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6613 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6614 while (MEM_P (in))
6615 in = XEXP (in, 0);
6616 if (REG_P (in))
6617 return 0;
6618 else if (GET_CODE (in) == PLUS)
6619 return (rtx_equal_p (x, in)
6620 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6621 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6622 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6623 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6624 }
6625
/* Not reached: every path through the PLUS branch above returns.  */
6626 gcc_unreachable ();
6627 }
6628
6629 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6630 registers. */
6631
6632 static int
6633 refers_to_mem_for_reload_p (rtx x)
6634 {
6635 const char *fmt;
6636 int i;
6637
6638 if (MEM_P (x))
6639 return 1;
6640
6641 if (REG_P (x))
6642 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6643 && reg_equiv_memory_loc (REGNO (x)));
6644
6645 fmt = GET_RTX_FORMAT (GET_CODE (x));
6646 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6647 if (fmt[i] == 'e'
6648 && (MEM_P (XEXP (x, i))
6649 || refers_to_mem_for_reload_p (XEXP (x, i))))
6650 return 1;
6651
6652 return 0;
6653 }
6654 \f
6655 /* Check the insns before INSN to see if there is a suitable register
6656 containing the same value as GOAL.
6657 If OTHER is -1, look for a register in class RCLASS.
6658 Otherwise, just see if register number OTHER shares GOAL's value.
6659
6660 Return an rtx for the register found, or zero if none is found.
6661
6662 If RELOAD_REG_P is (short *)1,
6663 we reject any hard reg that appears in reload_reg_rtx
6664 because such a hard reg is also needed coming into this insn.
6665
6666 If RELOAD_REG_P is any other nonzero value,
6667 it is a vector indexed by hard reg number
6668 and we reject any hard reg whose element in the vector is nonnegative
6669 as well as any that appears in reload_reg_rtx.
6670
6671 If GOAL is zero, then GOALREG is a register number; we look
6672 for an equivalent for that register.
6673
6674 MODE is the machine mode of the value we want an equivalence for.
6675 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6676
6677 This function is used by jump.c as well as in the reload pass.
6678
6679 If GOAL is the sum of the stack pointer and a constant, we treat it
6680 as if it were a constant except that sp is required to be unchanging. */
6681
6682 rtx
6683 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6684 short *reload_reg_p, int goalreg, enum machine_mode mode)
6685 {
6686 rtx_insn *p = insn;
6687 rtx goaltry, valtry, value;
6688 rtx_insn *where;
6689 rtx pat;
6690 int regno = -1;
6691 int valueno;
6692 int goal_mem = 0;
6693 int goal_const = 0;
6694 int goal_mem_addr_varies = 0;
6695 int need_stable_sp = 0;
6696 int nregs;
6697 int valuenregs;
6698 int num = 0;
6699
/* Classify GOAL: a register (record its number), a MEM (reject volatile
   or side-effecting addresses), a constant, or sp/fp plus a constant.
   Anything else cannot be matched.  */
6700 if (goal == 0)
6701 regno = goalreg;
6702 else if (REG_P (goal))
6703 regno = REGNO (goal);
6704 else if (MEM_P (goal))
6705 {
6706 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6707 if (MEM_VOLATILE_P (goal))
6708 return 0;
6709 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6710 return 0;
6711 /* An address with side effects must be reexecuted. */
6712 switch (code)
6713 {
6714 case POST_INC:
6715 case PRE_INC:
6716 case POST_DEC:
6717 case PRE_DEC:
6718 case POST_MODIFY:
6719 case PRE_MODIFY:
6720 return 0;
6721 default:
6722 break;
6723 }
6724 goal_mem = 1;
6725 }
6726 else if (CONSTANT_P (goal))
6727 goal_const = 1;
6728 else if (GET_CODE (goal) == PLUS
6729 && XEXP (goal, 0) == stack_pointer_rtx
6730 && CONSTANT_P (XEXP (goal, 1)))
6731 goal_const = need_stable_sp = 1;
6732 else if (GET_CODE (goal) == PLUS
6733 && XEXP (goal, 0) == frame_pointer_rtx
6734 && CONSTANT_P (XEXP (goal, 1)))
6735 goal_const = 1;
6736 else
6737 return 0;
6738
6739 num = 0;
6740 /* Scan insns back from INSN, looking for one that copies
6741 a value into or out of GOAL.
6742 Stop and give up if we reach a label. */
6743
6744 while (1)
6745 {
6746 p = PREV_INSN (p);
6747 if (p && DEBUG_INSN_P (p))
6748 continue;
6749 num++;
6750 if (p == 0 || LABEL_P (p)
6751 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6752 return 0;
6753
6754 /* Don't reuse register contents from before a setjmp-type
6755 function call; on the second return (from the longjmp) it
6756 might have been clobbered by a later reuse. It doesn't
6757 seem worthwhile to actually go and see if it is actually
6758 reused even if that information would be readily available;
6759 just don't reuse it across the setjmp call. */
6760 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6761 return 0;
6762
6763 if (NONJUMP_INSN_P (p)
6764 /* If we don't want spill regs ... */
6765 && (! (reload_reg_p != 0
6766 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6767 /* ... then ignore insns introduced by reload; they aren't
6768 useful and can cause results in reload_as_needed to be
6769 different from what they were when calculating the need for
6770 spills. If we notice an input-reload insn here, we will
6771 reject it below, but it might hide a usable equivalent.
6772 That makes bad code. It may even fail: perhaps no reg was
6773 spilled for this insn because it was assumed we would find
6774 that equivalent. */
6775 || INSN_UID (p) < reload_first_uid))
6776 {
6777 rtx tem;
6778 pat = single_set (p);
6779
6780 /* First check for something that sets some reg equal to GOAL. */
6781 if (pat != 0
6782 && ((regno >= 0
6783 && true_regnum (SET_SRC (pat)) == regno
6784 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6785 ||
6786 (regno >= 0
6787 && true_regnum (SET_DEST (pat)) == regno
6788 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6789 ||
6790 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6791 /* When looking for stack pointer + const,
6792 make sure we don't use a stack adjust. */
6793 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6794 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6795 || (goal_mem
6796 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6797 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6798 || (goal_mem
6799 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6800 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6801 /* If we are looking for a constant,
6802 and something equivalent to that constant was copied
6803 into a reg, we can use that reg. */
6804 || (goal_const && REG_NOTES (p) != 0
6805 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6806 && ((rtx_equal_p (XEXP (tem, 0), goal)
6807 && (valueno
6808 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6809 || (REG_P (SET_DEST (pat))
6810 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6811 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6812 && CONST_INT_P (goal)
6813 && 0 != (goaltry
6814 = operand_subword (XEXP (tem, 0), 0, 0,
6815 VOIDmode))
6816 && rtx_equal_p (goal, goaltry)
6817 && (valtry
6818 = operand_subword (SET_DEST (pat), 0, 0,
6819 VOIDmode))
6820 && (valueno = true_regnum (valtry)) >= 0)))
6821 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6822 NULL_RTX))
6823 && REG_P (SET_DEST (pat))
6824 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6825 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6826 && CONST_INT_P (goal)
6827 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6828 VOIDmode))
6829 && rtx_equal_p (goal, goaltry)
6830 && (valtry
6831 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6832 && (valueno = true_regnum (valtry)) >= 0)))
6833 {
/* A candidate was found.  Accept it only if it is the requested hard
   register (OTHER >= 0) or a hard register in RCLASS.  */
6834 if (other >= 0)
6835 {
6836 if (valueno != other)
6837 continue;
6838 }
6839 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6840 continue;
6841 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6842 mode, valueno))
6843 continue;
6844 value = valtry;
6845 where = p;
6846 break;
6847 }
6848 }
6849 }
6850
6851 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6852 (or copying VALUE into GOAL, if GOAL is also a register).
6853 Now verify that VALUE is really valid. */
6854
6855 /* VALUENO is the register number of VALUE; a hard register. */
6856
6857 /* Don't try to re-use something that is killed in this insn. We want
6858 to be able to trust REG_UNUSED notes. */
6859 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6860 return 0;
6861
6862 /* If we propose to get the value from the stack pointer or if GOAL is
6863 a MEM based on the stack pointer, we need a stable SP. */
6864 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6865 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6866 goal)))
6867 need_stable_sp = 1;
6868
6869 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6870 if (GET_MODE (value) != mode)
6871 return 0;
6872
6873 /* Reject VALUE if it was loaded from GOAL
6874 and is also a register that appears in the address of GOAL. */
6875
6876 if (goal_mem && value == SET_DEST (single_set (where))
6877 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6878 goal, (rtx*) 0))
6879 return 0;
6880
6881 /* Reject registers that overlap GOAL. */
6882
6883 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6884 nregs = hard_regno_nregs[regno][mode];
6885 else
6886 nregs = 1;
6887 valuenregs = hard_regno_nregs[valueno][mode];
6888
6889 if (!goal_mem && !goal_const
6890 && regno + nregs > valueno && regno < valueno + valuenregs)
6891 return 0;
6892
6893 /* Reject VALUE if it is one of the regs reserved for reloads.
6894 Reload1 knows how to reuse them anyway, and it would get
6895 confused if we allocated one without its knowledge.
6896 (Now that insns introduced by reload are ignored above,
6897 this case shouldn't happen, but I'm not positive.) */
6898
6899 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6900 {
6901 int i;
6902 for (i = 0; i < valuenregs; ++i)
6903 if (reload_reg_p[valueno + i] >= 0)
6904 return 0;
6905 }
6906
6907 /* Reject VALUE if it is a register being used for an input reload
6908 even if it is not one of those reserved. */
6909
6910 if (reload_reg_p != 0)
6911 {
6912 int i;
6913 for (i = 0; i < n_reloads; i++)
6914 if (rld[i].reg_rtx != 0 && rld[i].in)
6915 {
6916 int regno1 = REGNO (rld[i].reg_rtx);
6917 int nregs1 = hard_regno_nregs[regno1]
6918 [GET_MODE (rld[i].reg_rtx)];
6919 if (regno1 < valueno + valuenregs
6920 && regno1 + nregs1 > valueno)
6921 return 0;
6922 }
6923 }
6924
6925 if (goal_mem)
6926 /* We must treat frame pointer as varying here,
6927 since it can vary--in a nonlocal goto as generated by expand_goto. */
6928 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6929
6930 /* Now verify that the values of GOAL and VALUE remain unaltered
6931 until INSN is reached. */
6932
6933 p = insn;
6934 while (1)
6935 {
6936 p = PREV_INSN (p);
6937 if (p == where)
6938 return value;
6939
6940 /* Don't trust the conversion past a function call
6941 if either of the two is in a call-clobbered register, or memory. */
6942 if (CALL_P (p))
6943 {
6944 int i;
6945
6946 if (goal_mem || need_stable_sp)
6947 return 0;
6948
6949 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6950 for (i = 0; i < nregs; ++i)
6951 if (call_used_regs[regno + i]
6952 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6953 return 0;
6954
6955 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6956 for (i = 0; i < valuenregs; ++i)
6957 if (call_used_regs[valueno + i]
6958 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6959 return 0;
6960 }
6961
6962 if (INSN_P (p))
6963 {
6964 pat = PATTERN (p);
6965
6966 /* Watch out for unspec_volatile, and volatile asms. */
6967 if (volatile_insn_p (pat))
6968 return 0;
6969
6970 /* If this insn P stores in either GOAL or VALUE, return 0.
6971 If GOAL is a memory ref and this insn writes memory, return 0.
6972 If GOAL is a memory ref and its address is not constant,
6973 and this insn P changes a register used in GOAL, return 0. */
6974
6975 if (GET_CODE (pat) == COND_EXEC)
6976 pat = COND_EXEC_CODE (pat);
6977 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6978 {
6979 rtx dest = SET_DEST (pat);
6980 while (GET_CODE (dest) == SUBREG
6981 || GET_CODE (dest) == ZERO_EXTRACT
6982 || GET_CODE (dest) == STRICT_LOW_PART)
6983 dest = XEXP (dest, 0);
6984 if (REG_P (dest))
6985 {
6986 int xregno = REGNO (dest);
6987 int xnregs;
6988 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6989 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6990 else
6991 xnregs = 1;
6992 if (xregno < regno + nregs && xregno + xnregs > regno)
6993 return 0;
6994 if (xregno < valueno + valuenregs
6995 && xregno + xnregs > valueno)
6996 return 0;
6997 if (goal_mem_addr_varies
6998 && reg_overlap_mentioned_for_reload_p (dest, goal))
6999 return 0;
7000 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7001 return 0;
7002 }
7003 else if (goal_mem && MEM_P (dest)
7004 && ! push_operand (dest, GET_MODE (dest)))
7005 return 0;
7006 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7007 && reg_equiv_memory_loc (regno) != 0)
7008 return 0;
7009 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
7010 return 0;
7011 }
7012 else if (GET_CODE (pat) == PARALLEL)
7013 {
/* Apply the same store checks to every SET/CLOBBER in the PARALLEL.  */
7014 int i;
7015 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7016 {
7017 rtx v1 = XVECEXP (pat, 0, i);
7018 if (GET_CODE (v1) == COND_EXEC)
7019 v1 = COND_EXEC_CODE (v1);
7020 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7021 {
7022 rtx dest = SET_DEST (v1);
7023 while (GET_CODE (dest) == SUBREG
7024 || GET_CODE (dest) == ZERO_EXTRACT
7025 || GET_CODE (dest) == STRICT_LOW_PART)
7026 dest = XEXP (dest, 0);
7027 if (REG_P (dest))
7028 {
7029 int xregno = REGNO (dest);
7030 int xnregs;
7031 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7032 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7033 else
7034 xnregs = 1;
7035 if (xregno < regno + nregs
7036 && xregno + xnregs > regno)
7037 return 0;
7038 if (xregno < valueno + valuenregs
7039 && xregno + xnregs > valueno)
7040 return 0;
7041 if (goal_mem_addr_varies
7042 && reg_overlap_mentioned_for_reload_p (dest,
7043 goal))
7044 return 0;
7045 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7046 return 0;
7047 }
7048 else if (goal_mem && MEM_P (dest)
7049 && ! push_operand (dest, GET_MODE (dest)))
7050 return 0;
7051 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7052 && reg_equiv_memory_loc (regno) != 0)
7053 return 0;
7054 else if (need_stable_sp
7055 && push_operand (dest, GET_MODE (dest)))
7056 return 0;
7057 }
7058 }
7059 }
7060
/* Clobbers recorded in CALL_INSN_FUNCTION_USAGE also invalidate
   GOAL or VALUE.  */
7061 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7062 {
7063 rtx link;
7064
7065 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7066 link = XEXP (link, 1))
7067 {
7068 pat = XEXP (link, 0);
7069 if (GET_CODE (pat) == CLOBBER)
7070 {
7071 rtx dest = SET_DEST (pat);
7072
7073 if (REG_P (dest))
7074 {
7075 int xregno = REGNO (dest);
7076 int xnregs
7077 = hard_regno_nregs[xregno][GET_MODE (dest)];
7078
7079 if (xregno < regno + nregs
7080 && xregno + xnregs > regno)
7081 return 0;
7082 else if (xregno < valueno + valuenregs
7083 && xregno + xnregs > valueno)
7084 return 0;
7085 else if (goal_mem_addr_varies
7086 && reg_overlap_mentioned_for_reload_p (dest,
7087 goal))
7088 return 0;
7089 }
7090
7091 else if (goal_mem && MEM_P (dest)
7092 && ! push_operand (dest, GET_MODE (dest)))
7093 return 0;
7094 else if (need_stable_sp
7095 && push_operand (dest, GET_MODE (dest)))
7096 return 0;
7097 }
7098 }
7099 }
7100
7101 #ifdef AUTO_INC_DEC
7102 /* If this insn auto-increments or auto-decrements
7103 either regno or valueno, return 0 now.
7104 If GOAL is a memory ref and its address is not constant,
7105 and this insn P increments a register used in GOAL, return 0. */
7106 {
7107 rtx link;
7108
7109 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7110 if (REG_NOTE_KIND (link) == REG_INC
7111 && REG_P (XEXP (link, 0)))
7112 {
7113 int incno = REGNO (XEXP (link, 0));
7114 if (incno < regno + nregs && incno >= regno)
7115 return 0;
7116 if (incno < valueno + valuenregs && incno >= valueno)
7117 return 0;
7118 if (goal_mem_addr_varies
7119 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7120 goal))
7121 return 0;
7122 }
7123 }
7124 #endif
7125 }
7126 }
7127 }
7128 \f
7129 /* Find a place where INCED appears in an increment or decrement operator
7130 within X, and return the amount INCED is incremented or decremented by.
7131 The value is always positive. */
7132
7133 static int
7134 find_inc_amount (rtx x, rtx inced)
7135 {
7136 enum rtx_code code = GET_CODE (x);
7137 const char *fmt;
7138 int i;
7139
7140 if (code == MEM)
7141 {
7142 rtx addr = XEXP (x, 0);
7143 if ((GET_CODE (addr) == PRE_DEC
7144 || GET_CODE (addr) == POST_DEC
7145 || GET_CODE (addr) == PRE_INC
7146 || GET_CODE (addr) == POST_INC)
7147 && XEXP (addr, 0) == inced)
7148 return GET_MODE_SIZE (GET_MODE (x));
7149 else if ((GET_CODE (addr) == PRE_MODIFY
7150 || GET_CODE (addr) == POST_MODIFY)
7151 && GET_CODE (XEXP (addr, 1)) == PLUS
7152 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7153 && XEXP (addr, 0) == inced
7154 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7155 {
7156 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7157 return i < 0 ? -i : i;
7158 }
7159 }
7160
7161 fmt = GET_RTX_FORMAT (code);
7162 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7163 {
7164 if (fmt[i] == 'e')
7165 {
7166 int tem = find_inc_amount (XEXP (x, i), inced);
7167 if (tem != 0)
7168 return tem;
7169 }
7170 if (fmt[i] == 'E')
7171 {
7172 int j;
7173 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7174 {
7175 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7176 if (tem != 0)
7177 return tem;
7178 }
7179 }
7180 }
7181
7182 return 0;
7183 }
7184 \f
7185 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7186 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7187
7188 #ifdef AUTO_INC_DEC
7189 static int
7190 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7191 rtx insn)
7192 {
7193 rtx link;
7194
7195 gcc_assert (insn);
7196
/* NOTE(review): regno_clobbered_p may also pass a PARALLEL element
   here; such rtxes fail INSN_P and yield 0.  */
7197 if (! INSN_P (insn))
7198 return 0;
7199
/* Scan the insn's notes for a REG_INC naming a register in range.  */
7200 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7201 if (REG_NOTE_KIND (link) == REG_INC)
7202 {
7203 unsigned int test = (int) REGNO (XEXP (link, 0));
7204 if (test >= regno && test < endregno)
7205 return 1;
7206 }
7207 return 0;
7208 }
7209 #else
7210
/* Without AUTO_INC_DEC no REG_INC notes exist, so this is always 0.  */
7211 #define reg_inc_found_and_valid_p(regno,endregno,insn) 0
7212
7213 #endif
7214
7215 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7216 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7217 REG_INC. REGNO must refer to a hard register. */
7218
7219 int
7220 regno_clobbered_p (unsigned int regno, rtx_insn *insn, enum machine_mode mode,
7221 int sets)
7222 {
7223 unsigned int nregs, endregno;
7224
7225 /* regno must be a hard register. */
7226 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7227
7228 nregs = hard_regno_nregs[regno][mode];
7229 endregno = regno + nregs;
7230
/* Case 1: the whole pattern is a single CLOBBER (or SET, if requested)
   of a register.  */
7231 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7232 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7233 && REG_P (XEXP (PATTERN (insn), 0)))
7234 {
7235 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7236
7237 return test >= regno && test < endregno;
7238 }
7239
/* Case 2: SETS == 2 additionally treats a REG_INC note as a clobber.  */
7240 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7241 return 1;
7242
/* Case 3: scan each element of a PARALLEL the same way.  */
7243 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7244 {
7245 int i = XVECLEN (PATTERN (insn), 0) - 1;
7246
7247 for (; i >= 0; i--)
7248 {
7249 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7250 if ((GET_CODE (elt) == CLOBBER
7251 || (sets == 1 && GET_CODE (elt) == SET))
7252 && REG_P (XEXP (elt, 0)))
7253 {
7254 unsigned int test = REGNO (XEXP (elt, 0));
7255
7256 if (test >= regno && test < endregno)
7257 return 1;
7258 }
7259 if (sets == 2
7260 && reg_inc_found_and_valid_p (regno, endregno, elt))
7261 return 1;
7262 }
7263 }
7264
7265 return 0;
7266 }
7267
7268 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7269 rtx
7270 reload_adjust_reg_for_mode (rtx reloadreg, enum machine_mode mode)
7271 {
7272 int regno;
7273
7274 if (GET_MODE (reloadreg) == mode)
7275 return reloadreg;
7276
7277 regno = REGNO (reloadreg);
7278
7279 if (REG_WORDS_BIG_ENDIAN)
7280 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7281 - (int) hard_regno_nregs[regno][mode];
7282
7283 return gen_rtx_REG (mode, regno);
7284 }
7285
/* Printable names for the reload timing classes; indexed by
   (int) rld[r].when_needed in debug_reload_to_stream below.
   NOTE(review): the order must match enum reload_when_needed — verify
   against reload.h when adding or reordering values.  */
7286 static const char *const reload_when_needed_name[] =
7287 {
7288 "RELOAD_FOR_INPUT",
7289 "RELOAD_FOR_OUTPUT",
7290 "RELOAD_FOR_INSN",
7291 "RELOAD_FOR_INPUT_ADDRESS",
7292 "RELOAD_FOR_INPADDR_ADDRESS",
7293 "RELOAD_FOR_OUTPUT_ADDRESS",
7294 "RELOAD_FOR_OUTADDR_ADDRESS",
7295 "RELOAD_FOR_OPERAND_ADDRESS",
7296 "RELOAD_FOR_OPADDR_ADDR",
7297 "RELOAD_OTHER",
7298 "RELOAD_FOR_OTHER_ADDRESS"
7299 };
7300
7301 /* These functions are used to print the variables set by 'find_reloads' */
7302
7303 DEBUG_FUNCTION void
7304 debug_reload_to_stream (FILE *f)
7305 {
7306 int r;
7307 const char *prefix;
7308
/* Default to stderr when no stream is supplied.  */
7309 if (! f)
7310 f = stderr;
/* Dump every entry of the global rld[] array, one reload per stanza.  */
7311 for (r = 0; r < n_reloads; r++)
7312 {
7313 fprintf (f, "Reload %d: ", r);
7314
7315 if (rld[r].in != 0)
7316 {
7317 fprintf (f, "reload_in (%s) = ",
7318 GET_MODE_NAME (rld[r].inmode));
7319 print_inline_rtx (f, rld[r].in, 24);
7320 fprintf (f, "\n\t");
7321 }
7322
7323 if (rld[r].out != 0)
7324 {
7325 fprintf (f, "reload_out (%s) = ",
7326 GET_MODE_NAME (rld[r].outmode));
7327 print_inline_rtx (f, rld[r].out, 24);
7328 fprintf (f, "\n\t");
7329 }
7330
7331 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7332
7333 fprintf (f, "%s (opnum = %d)",
7334 reload_when_needed_name[(int) rld[r].when_needed],
7335 rld[r].opnum);
7336
7337 if (rld[r].optional)
7338 fprintf (f, ", optional");
7339
7340 if (rld[r].nongroup)
7341 fprintf (f, ", nongroup");
7342
7343 if (rld[r].inc != 0)
7344 fprintf (f, ", inc by %d", rld[r].inc);
7345
7346 if (rld[r].nocombine)
7347 fprintf (f, ", can't combine");
7348
7349 if (rld[r].secondary_p)
7350 fprintf (f, ", secondary_reload_p");
7351
7352 if (rld[r].in_reg != 0)
7353 {
7354 fprintf (f, "\n\treload_in_reg: ");
7355 print_inline_rtx (f, rld[r].in_reg, 24);
7356 }
7357
7358 if (rld[r].out_reg != 0)
7359 {
7360 fprintf (f, "\n\treload_out_reg: ");
7361 print_inline_rtx (f, rld[r].out_reg, 24);
7362 }
7363
7364 if (rld[r].reg_rtx != 0)
7365 {
7366 fprintf (f, "\n\treload_reg_rtx: ");
7367 print_inline_rtx (f, rld[r].reg_rtx, 24);
7368 }
7369
/* PREFIX starts a fresh line for the first secondary field printed and
   becomes ", " so subsequent fields continue on the same line.  */
7370 prefix = "\n\t";
7371 if (rld[r].secondary_in_reload != -1)
7372 {
7373 fprintf (f, "%ssecondary_in_reload = %d",
7374 prefix, rld[r].secondary_in_reload);
7375 prefix = ", ";
7376 }
7377
7378 if (rld[r].secondary_out_reload != -1)
7379 fprintf (f, "%ssecondary_out_reload = %d\n",
7380 prefix, rld[r].secondary_out_reload);
7381
7382 prefix = "\n\t";
7383 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7384 {
7385 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7386 insn_data[rld[r].secondary_in_icode].name);
7387 prefix = ", ";
7388 }
7389
7390 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7391 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7392 insn_data[rld[r].secondary_out_icode].name);
7393
7394 fprintf (f, "\n");
7395 }
7396 }
7397
7398 DEBUG_FUNCTION void
7399 debug_reload (void)
7400 {
/* Convenience wrapper: dump all current reloads to stderr.  */
7401 debug_reload_to_stream (stderr);
7402 }