1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
85
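/* Editorial sketch, not part of the original file: the calling sequence
   described above, roughly as the reload1.c driver uses it.  The loop
   wording and the choose_and_emit_reloads () helper are placeholders for
   the driver's own code, not functions defined anywhere.

       init_reload ();
       for each INSN that may need reloading:
         {
           find_reloads (INSN, 1, ind_levels, live_known, reload_reg_p);
           choose_and_emit_reloads ();  -- pick rld[r].reg_rtx for each of
                                           the n_reloads entries and emit
                                           load insns before INSN and any
                                           store insns after it
           subst_reloads (INSN);        -- substitute the chosen reload
                                           regs into the recorded locations
         }  */
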
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109 #include "function-abi.h"
110
111 /* True if X is a constant that can be forced into the constant pool.
112 MODE is the mode of the operand, or VOIDmode if not known. */
113 #define CONST_POOL_OK_P(MODE, X) \
114 ((MODE) != VOIDmode \
115 && CONSTANT_P (X) \
116 && GET_CODE (X) != HIGH \
117 && !targetm.cannot_force_const_mem (MODE, X))
118
119 /* True if C is a non-empty register class that has too few registers
120 to be safely used as a reload target class. */
121
122 static inline bool
123 small_register_class_p (reg_class_t rclass)
124 {
125 return (reg_class_size [(int) rclass] == 1
126 || (reg_class_size [(int) rclass] >= 1
127 && targetm.class_likely_spilled_p (rclass)));
128 }
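
/* Editorial example, not part of the original file: a single-register class
   (such as a dedicated counter or accumulator class on some targets) is
   always treated as small here, and a larger class is also treated as small
   when the target's class_likely_spilled_p hook reports that pseudos
   allocated to it are likely to be spilled. */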
129
130 \f
131 /* All reloads of the current insn are recorded here. See reload.h for
132 comments. */
133 int n_reloads;
134 struct reload rld[MAX_RELOADS];
135
136 /* All the "earlyclobber" operands of the current insn
137 are recorded here. */
138 int n_earlyclobbers;
139 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
140
141 int reload_n_operands;
142
143 /* Replacing reloads.
144
145 If `replace_reloads' is nonzero, then as each reload is recorded
146 an entry is made for it in the table `replacements'.
147 Then later `subst_reloads' can look through that table and
148 perform all the replacements needed. */
149
150 /* Nonzero means record the places to replace. */
151 static int replace_reloads;
152
153 /* Each replacement is recorded with a structure like this. */
154 struct replacement
155 {
156 rtx *where; /* Location to store in. */
157 int what; /* Which reload this is for. */
158 machine_mode mode; /* Mode it must have. */
159 };
160
161 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162
163 /* Number of replacements currently recorded. */
164 static int n_replacements;
165
166 /* Used to track what is modified by an operand. */
167 struct decomposition
168 {
169 int reg_flag; /* Nonzero if referencing a register. */
170 int safe; /* Nonzero if this can't conflict with anything. */
171 rtx base; /* Base address for MEM. */
172 poly_int64_pod start; /* Starting offset or register number. */
173 poly_int64_pod end; /* Ending offset or register number. */
174 };
175
176 /* Save MEMs needed to copy from one class of registers to another. One MEM
177 is used per mode, but normally only one or two modes are ever used.
178
179 We keep two versions, before and after register elimination. The one
180 after register elimination is recorded separately for each operand. This
181 is done in case the address is not valid, to be sure that we reload
182 each one separately. */
183
184 static rtx secondary_memlocs[NUM_MACHINE_MODES];
185 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
186 static int secondary_memlocs_elim_used = 0;
187
188 /* The instruction we are doing reloads for;
189 so we can test whether a register dies in it. */
190 static rtx_insn *this_insn;
191
192 /* Nonzero if this instruction is a user-specified asm with operands. */
193 static int this_insn_is_asm;
194
195 /* If hard_regs_live_known is nonzero,
196 we can tell which hard regs are currently live,
197 at least enough to succeed in choosing dummy reloads. */
198 static int hard_regs_live_known;
199
200 /* Indexed by hard reg number,
201 element is nonnegative if hard reg has been spilled.
202 This vector is passed to `find_reloads' as an argument
203 and is not changed here. */
204 static short *static_reload_reg_p;
205
206 /* Set to 1 in subst_reg_equivs if it changes anything. */
207 static int subst_reg_equivs_changed;
208
209 /* On return from push_reload, holds the reload-number for the OUT
210 operand, which can be different from the one for the input operand. */
211 static int output_reloadnum;
212
213 /* Compare two RTX's. */
214 #define MATCHES(x, y) \
215 (x == y || (x != 0 && (REG_P (x) \
216 ? REG_P (y) && REGNO (x) == REGNO (y) \
217 : rtx_equal_p (x, y) && ! side_effects_p (x))))
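
/* Editorial example, not part of the original file: MATCHES compares
   registers by number only, so (reg:SI 3) matches (reg:QI 3) even though
   the modes differ, while two structurally equal non-register expressions
   match only when they have no side effects, so (mem (post_inc (reg 3)))
   never matches a distinct copy of itself. */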
218
219 /* Indicates if two reload purposes are for similar enough things that we
220 can merge their reloads. */
221 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
222 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
223 || ((when1) == (when2) && (op1) == (op2)) \
224 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
225 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
226 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
227 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
228 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
229
230 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
231 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
232 ((when1) != (when2) \
233 || ! ((op1) == (op2) \
234 || (when1) == RELOAD_FOR_INPUT \
235 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
236 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
237
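/* Editorial example, not part of the original file: two RELOAD_FOR_INPUT
   reloads are mergeable even when they are for different operands, and
   MERGE_TO_OTHER is then false, so the merged reload keeps the type
   RELOAD_FOR_INPUT.  By contrast, a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 0 and one for operand 1 are not mergeable at all unless one of
   them is RELOAD_OTHER. */
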
238 /* If we are going to reload an address, compute the reload type to
239 use. */
240 #define ADDR_TYPE(type) \
241 ((type) == RELOAD_FOR_INPUT_ADDRESS \
242 ? RELOAD_FOR_INPADDR_ADDRESS \
243 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
244 ? RELOAD_FOR_OUTADDR_ADDRESS \
245 : (type)))
246
247 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
248 machine_mode, enum reload_type,
249 enum insn_code *, secondary_reload_info *);
250 static enum reg_class find_valid_class (machine_mode, machine_mode,
251 int, unsigned int);
252 static void push_replacement (rtx *, int, machine_mode);
253 static void dup_replacements (rtx *, rtx *);
254 static void combine_reloads (void);
255 static int find_reusable_reload (rtx *, rtx, enum reg_class,
256 enum reload_type, int, int);
257 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
258 machine_mode, reg_class_t, int, int);
259 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
260 static struct decomposition decompose (rtx);
261 static int immune_p (rtx, rtx, struct decomposition);
262 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
263 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
264 rtx_insn *, int *);
265 static rtx make_memloc (rtx, int);
266 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
267 addr_space_t, rtx *);
268 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
269 int, enum reload_type, int, rtx_insn *);
270 static rtx subst_reg_equivs (rtx, rtx_insn *);
271 static rtx subst_indexed_address (rtx);
272 static void update_auto_inc_notes (rtx_insn *, int, int);
273 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
274 enum rtx_code, enum rtx_code, rtx *,
275 int, enum reload_type,int, rtx_insn *);
276 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
277 machine_mode, int,
278 enum reload_type, int);
279 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
280 int, rtx_insn *, int *);
281 static void copy_replacements_1 (rtx *, rtx *, int);
282 static poly_int64 find_inc_amount (rtx, rtx);
283 static int refers_to_mem_for_reload_p (rtx);
284 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
285 rtx, rtx *);
286
287 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
288 list yet. */
289
290 static void
291 push_reg_equiv_alt_mem (int regno, rtx mem)
292 {
293 rtx it;
294
295 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
296 if (rtx_equal_p (XEXP (it, 0), mem))
297 return;
298
299 reg_equiv_alt_mem_list (regno)
300 = alloc_EXPR_LIST (REG_EQUIV, mem,
301 reg_equiv_alt_mem_list (regno));
302 }
303 \f
304 /* Determine if any secondary reloads are needed for loading (if IN_P is
305 nonzero) or storing (if IN_P is zero) X to or from a reload register of
306 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
307 are needed, push them.
308
309 Return the reload number of the secondary reload we made, or -1 if
310 we didn't need one. *PICODE is set to the insn_code to use if we do
311 need a secondary reload. */
312
313 static int
314 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
315 enum reg_class reload_class,
316 machine_mode reload_mode, enum reload_type type,
317 enum insn_code *picode, secondary_reload_info *prev_sri)
318 {
319 enum reg_class rclass = NO_REGS;
320 enum reg_class scratch_class;
321 machine_mode mode = reload_mode;
322 enum insn_code icode = CODE_FOR_nothing;
323 enum insn_code t_icode = CODE_FOR_nothing;
324 enum reload_type secondary_type;
325 int s_reload, t_reload = -1;
326 const char *scratch_constraint;
327 secondary_reload_info sri;
328
329 if (type == RELOAD_FOR_INPUT_ADDRESS
330 || type == RELOAD_FOR_OUTPUT_ADDRESS
331 || type == RELOAD_FOR_INPADDR_ADDRESS
332 || type == RELOAD_FOR_OUTADDR_ADDRESS)
333 secondary_type = type;
334 else
335 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
336
337 *picode = CODE_FOR_nothing;
338
339 /* If X is a paradoxical SUBREG, use the inner value to determine both the
340 mode and object being reloaded. */
341 if (paradoxical_subreg_p (x))
342 {
343 x = SUBREG_REG (x);
344 reload_mode = GET_MODE (x);
345 }
346
347 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
348 is still a pseudo-register by now, it *must* have an equivalent MEM
349 but we don't want to assume that), use that equivalent when seeing if
350 a secondary reload is needed since whether or not a reload is needed
351 might be sensitive to the form of the MEM. */
352
353 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
354 && reg_equiv_mem (REGNO (x)))
355 x = reg_equiv_mem (REGNO (x));
356
357 sri.icode = CODE_FOR_nothing;
358 sri.prev_sri = prev_sri;
359 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
360 reload_mode, &sri);
361 icode = (enum insn_code) sri.icode;
362
363 /* If we don't need any secondary registers, done. */
364 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
365 return -1;
366
367 if (rclass != NO_REGS)
368 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
369 reload_mode, type, &t_icode, &sri);
370
371 /* If we will be using an insn, the secondary reload is for a
372 scratch register. */
373
374 if (icode != CODE_FOR_nothing)
375 {
376 /* If IN_P is nonzero, the reload register will be the output in
377 operand 0. If IN_P is zero, the reload register will be the input
378 in operand 1. Outputs should have an initial "=", which we must
379 skip. */
380
381 /* ??? It would be useful to be able to handle only two, or more than
382 three, operands, but for now we can only handle the case of having
383 exactly three: output, input and one temp/scratch. */
384 gcc_assert (insn_data[(int) icode].n_operands == 3);
385
386 /* ??? We currently have no way to represent a reload that needs
387 an icode to reload from an intermediate tertiary reload register.
388 We should probably have a new field in struct reload to tag a
389 chain of scratch operand reloads onto. */
390 gcc_assert (rclass == NO_REGS);
391
392 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
393 gcc_assert (*scratch_constraint == '=');
394 scratch_constraint++;
395 if (*scratch_constraint == '&')
396 scratch_constraint++;
397 scratch_class = (reg_class_for_constraint
398 (lookup_constraint (scratch_constraint)));
399
400 rclass = scratch_class;
401 mode = insn_data[(int) icode].operand[2].mode;
402 }
403
404 /* This case isn't valid, so fail. Reload is allowed to use the same
405 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
406 in the case of a secondary register, we actually need two different
407 registers for correct code. We fail here to prevent the possibility of
408 silently generating incorrect code later.
409
410 The convention is that secondary input reloads are valid only if the
411 secondary_class is different from class. If you have such a case, you
412 cannot use secondary reloads, you must work around the problem some
413 other way.
414
415 Allow this when a reload_in/out pattern is being used. I.e. assume
416 that the generated code handles this case. */
417
418 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
419 || t_icode != CODE_FOR_nothing);
420
421 /* See if we can reuse an existing secondary reload. */
422 for (s_reload = 0; s_reload < n_reloads; s_reload++)
423 if (rld[s_reload].secondary_p
424 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
425 || reg_class_subset_p (rld[s_reload].rclass, rclass))
426 && ((in_p && rld[s_reload].inmode == mode)
427 || (! in_p && rld[s_reload].outmode == mode))
428 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
429 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
430 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
431 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
432 && (small_register_class_p (rclass)
433 || targetm.small_register_classes_for_mode_p (VOIDmode))
434 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
435 opnum, rld[s_reload].opnum))
436 {
437 if (in_p)
438 rld[s_reload].inmode = mode;
439 if (! in_p)
440 rld[s_reload].outmode = mode;
441
442 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
443 rld[s_reload].rclass = rclass;
444
445 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
446 rld[s_reload].optional &= optional;
447 rld[s_reload].secondary_p = 1;
448 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
449 opnum, rld[s_reload].opnum))
450 rld[s_reload].when_needed = RELOAD_OTHER;
451
452 break;
453 }
454
455 if (s_reload == n_reloads)
456 {
457 /* If we need a memory location to copy between the two reload regs,
458 set it up now. Note that we do the input case before making
459 the reload and the output case after. This is due to the
460 way reloads are output. */
461
462 if (in_p && icode == CODE_FOR_nothing
463 && targetm.secondary_memory_needed (mode, rclass, reload_class))
464 {
465 get_secondary_mem (x, reload_mode, opnum, type);
466
467 /* We may have just added new reloads. Make sure we add
468 the new reload at the end. */
469 s_reload = n_reloads;
470 }
471
472 /* We need to make a new secondary reload for this register class. */
473 rld[s_reload].in = rld[s_reload].out = 0;
474 rld[s_reload].rclass = rclass;
475
476 rld[s_reload].inmode = in_p ? mode : VOIDmode;
477 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
478 rld[s_reload].reg_rtx = 0;
479 rld[s_reload].optional = optional;
480 rld[s_reload].inc = 0;
481 /* Maybe we could combine these, but it seems too tricky. */
482 rld[s_reload].nocombine = 1;
483 rld[s_reload].in_reg = 0;
484 rld[s_reload].out_reg = 0;
485 rld[s_reload].opnum = opnum;
486 rld[s_reload].when_needed = secondary_type;
487 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
488 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
489 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
490 rld[s_reload].secondary_out_icode
491 = ! in_p ? t_icode : CODE_FOR_nothing;
492 rld[s_reload].secondary_p = 1;
493
494 n_reloads++;
495
496 if (! in_p && icode == CODE_FOR_nothing
497 && targetm.secondary_memory_needed (mode, reload_class, rclass))
498 get_secondary_mem (x, mode, opnum, type);
499 }
500
501 *picode = icode;
502 return s_reload;
503 }
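
/* Editorial example, not part of the original file: a typical case for the
   function above is a target where a value cannot be moved directly
   between a floating-point register and the form the operand happens to
   have; the secondary_reload hook then returns an intermediate class such
   as GENERAL_REGS and an extra reload through that class is recorded here
   (or a scratch-register reload, when the hook supplies an insn code
   instead of a class). */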
504
505 /* If a secondary reload is needed, return its class. If both an intermediate
506 register and a scratch register are needed, we return the class of the
507 intermediate register. */
508 reg_class_t
509 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
510 rtx x)
511 {
512 enum insn_code icode;
513 secondary_reload_info sri;
514
515 sri.icode = CODE_FOR_nothing;
516 sri.prev_sri = NULL;
517 rclass
518 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
519 icode = (enum insn_code) sri.icode;
520
521 /* If there are no secondary reloads at all, we return NO_REGS.
522 If an intermediate register is needed, we return its class. */
523 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
524 return rclass;
525
526 /* No intermediate register is needed, but we have a special reload
527 pattern, which we assume for now needs a scratch register. */
528 return scratch_reload_class (icode);
529 }
530
531 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
532 three operands, verify that operand 2 is an output operand, and return
533 its register class.
534 ??? We'd like to be able to handle any pattern with at least 2 operands,
535 for zero or more scratch registers, but that needs more infrastructure. */
536 enum reg_class
537 scratch_reload_class (enum insn_code icode)
538 {
539 const char *scratch_constraint;
540 enum reg_class rclass;
541
542 gcc_assert (insn_data[(int) icode].n_operands == 3);
543 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
544 gcc_assert (*scratch_constraint == '=');
545 scratch_constraint++;
546 if (*scratch_constraint == '&')
547 scratch_constraint++;
548 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
549 gcc_assert (rclass != NO_REGS);
550 return rclass;
551 }
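
/* Editorial example, not part of the original file: for a reload pattern
   whose third operand is a clobbered scratch such as

       (clobber (match_operand:SI 2 "register_operand" "=&r"))

   the parsing above skips the '=' and '&' and maps the remaining "r"
   constraint back to GENERAL_REGS (or the target's equivalent class). */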
552 \f
553 /* Return a memory location that will be used to copy X in mode MODE.
554 If we haven't already made a location for this mode in this insn,
555 call find_reloads_address on the location being returned. */
556
557 rtx
558 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
559 int opnum, enum reload_type type)
560 {
561 rtx loc;
562 int mem_valid;
563
564 /* By default, if MODE is narrower than a word, widen it to a word.
565 This is required because most machines that require these memory
566 locations do not support short loads and stores from all registers
567 (e.g., FP registers). */
568
569 mode = targetm.secondary_memory_needed_mode (mode);
570
571 /* If we already have made a MEM for this operand in MODE, return it. */
572 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
573 return secondary_memlocs_elim[(int) mode][opnum];
574
575 /* If this is the first time we've tried to get a MEM for this mode,
576 allocate a new one. `something_changed' in reload will get set
577 by noticing that the frame size has changed. */
578
579 if (secondary_memlocs[(int) mode] == 0)
580 {
581 #ifdef SECONDARY_MEMORY_NEEDED_RTX
582 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
583 #else
584 secondary_memlocs[(int) mode]
585 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
586 #endif
587 }
588
589 /* Get a version of the address doing any eliminations needed. If that
590 didn't give us a new MEM, make a new one if it isn't valid. */
591
592 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
593 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
594 MEM_ADDR_SPACE (loc));
595
596 if (! mem_valid && loc == secondary_memlocs[(int) mode])
597 loc = copy_rtx (loc);
598
599 /* The only time the call below will do anything is if the stack
600 offset is too large. In that case IND_LEVELS doesn't matter, so we
601 can just pass a zero. Adjust the type to be the address of the
602 corresponding object. If the address was valid, save the eliminated
603 address. If it wasn't valid, we need to make a reload each time, so
604 don't save it. */
605
606 if (! mem_valid)
607 {
608 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
609 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
610 : RELOAD_OTHER);
611
612 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
613 opnum, type, 0, 0);
614 }
615
616 secondary_memlocs_elim[(int) mode][opnum] = loc;
617 if (secondary_memlocs_elim_used <= (int)mode)
618 secondary_memlocs_elim_used = (int)mode + 1;
619 return loc;
620 }
621
622 /* Clear any secondary memory locations we've made. */
623
624 void
625 clear_secondary_mem (void)
626 {
627 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
628 }
629 \f
630
631 /* Find the largest class which has at least one register valid in
632 mode INNER, and which for every such register, that register number
633 plus N is also valid in OUTER (if in range) and is cheap to move
634 into REGNO. Such a class must exist. */
635
636 static enum reg_class
637 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
638 machine_mode inner ATTRIBUTE_UNUSED, int n,
639 unsigned int dest_regno ATTRIBUTE_UNUSED)
640 {
641 int best_cost = -1;
642 int rclass;
643 int regno;
644 enum reg_class best_class = NO_REGS;
645 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
646 unsigned int best_size = 0;
647 int cost;
648
649 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
650 {
651 int bad = 0;
652 int good = 0;
653 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
654 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
655 {
656 if (targetm.hard_regno_mode_ok (regno, inner))
657 {
658 good = 1;
659 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
660 && !targetm.hard_regno_mode_ok (regno + n, outer))
661 bad = 1;
662 }
663 }
664
665 if (bad || !good)
666 continue;
667 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
668
669 if ((reg_class_size[rclass] > best_size
670 && (best_cost < 0 || best_cost >= cost))
671 || best_cost > cost)
672 {
673 best_class = (enum reg_class) rclass;
674 best_size = reg_class_size[rclass];
675 best_cost = register_move_cost (outer, (enum reg_class) rclass,
676 dest_class);
677 }
678 }
679
680 gcc_assert (best_size != 0);
681
682 return best_class;
683 }
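
/* Editorial example, not part of the original file: when reloading
   something like (subreg:SI (reg:DI R) 4) on a 32-bit target, N is 1, so
   the loop above looks for the largest class in which every register able
   to hold the DImode inner value also has register number + 1 usable in
   SImode, preferring the class that is cheapest to move into the
   destination's class. */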
684
685 /* We are trying to reload a subreg of something that is not a register.
686 Find the largest class which contains only registers valid in
687 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
688 which we would eventually like to obtain the object. */
689
690 static enum reg_class
691 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
692 machine_mode mode ATTRIBUTE_UNUSED,
693 enum reg_class dest_class ATTRIBUTE_UNUSED)
694 {
695 int best_cost = -1;
696 int rclass;
697 int regno;
698 enum reg_class best_class = NO_REGS;
699 unsigned int best_size = 0;
700 int cost;
701
702 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
703 {
704 unsigned int computed_rclass_size = 0;
705
706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
707 {
708 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
709 && targetm.hard_regno_mode_ok (regno, mode))
710 computed_rclass_size++;
711 }
712
713 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
714
715 if ((computed_rclass_size > best_size
716 && (best_cost < 0 || best_cost >= cost))
717 || best_cost > cost)
718 {
719 best_class = (enum reg_class) rclass;
720 best_size = computed_rclass_size;
721 best_cost = register_move_cost (outer, (enum reg_class) rclass,
722 dest_class);
723 }
724 }
725
726 gcc_assert (best_size != 0);
727
728 #ifdef LIMIT_RELOAD_CLASS
729 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
730 #endif
731 return best_class;
732 }
733 \f
734 /* Return the number of a previously made reload that can be combined with
735 a new one, or n_reloads if none of the existing reloads can be used.
736 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
737 push_reload, they determine the kind of the new reload that we try to
738 combine. P_IN points to the corresponding value of IN, which can be
739 modified by this function.
740 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
741
742 static int
743 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
744 enum reload_type type, int opnum, int dont_share)
745 {
746 rtx in = *p_in;
747 int i;
748 /* We can't merge two reloads if the output of either one is
749 earlyclobbered. */
750
751 if (earlyclobber_operand_p (out))
752 return n_reloads;
753
754 /* We can use an existing reload if the class is right
755 and at least one of IN and OUT is a match
756 and the other is at worst neutral.
757 (A zero compared against anything is neutral.)
758
759 For targets with small register classes, don't use existing reloads
760 unless they are for the same thing since that can cause us to need
761 more reload registers than we otherwise would. */
762
763 for (i = 0; i < n_reloads; i++)
764 if ((reg_class_subset_p (rclass, rld[i].rclass)
765 || reg_class_subset_p (rld[i].rclass, rclass))
766 /* If the existing reload has a register, it must fit our class. */
767 && (rld[i].reg_rtx == 0
768 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
769 true_regnum (rld[i].reg_rtx)))
770 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
771 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
772 || (out != 0 && MATCHES (rld[i].out, out)
773 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
774 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
775 && (small_register_class_p (rclass)
776 || targetm.small_register_classes_for_mode_p (VOIDmode))
777 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
778 return i;
779
780 /* Reloading a plain reg for input can match a reload to postincrement
781 that reg, since the postincrement's value is the right value.
782 Likewise, it can match a preincrement reload, since we regard
783 the preincrementation as happening before any ref in this insn
784 to that register. */
785 for (i = 0; i < n_reloads; i++)
786 if ((reg_class_subset_p (rclass, rld[i].rclass)
787 || reg_class_subset_p (rld[i].rclass, rclass))
788 /* If the existing reload has a register, it must fit our
789 class. */
790 && (rld[i].reg_rtx == 0
791 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
792 true_regnum (rld[i].reg_rtx)))
793 && out == 0 && rld[i].out == 0 && rld[i].in != 0
794 && ((REG_P (in)
795 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
796 && MATCHES (XEXP (rld[i].in, 0), in))
797 || (REG_P (rld[i].in)
798 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
799 && MATCHES (XEXP (in, 0), rld[i].in)))
800 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
801 && (small_register_class_p (rclass)
802 || targetm.small_register_classes_for_mode_p (VOIDmode))
803 && MERGABLE_RELOADS (type, rld[i].when_needed,
804 opnum, rld[i].opnum))
805 {
806 /* Make sure reload_in ultimately has the increment,
807 not the plain register. */
808 if (REG_P (in))
809 *p_in = rld[i].in;
810 return i;
811 }
812 return n_reloads;
813 }
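
/* Editorial example, not part of the original file: if an earlier reload
   has rld[i].in = (post_inc (reg 65)) and we are now asked for an
   input-only reload of (reg 65) in a compatible class, the second loop
   above reuses reload I and rewrites *P_IN to the POST_INC form, so the
   increment is still performed by the shared reload. */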
814
815 /* Return true if:
816
817 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
818 of a multiword value; and
819
820 (b) the number of *words* in REG does not match the number of *registers*
821 in REG. */
822
823 static bool
824 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
825 {
826 machine_mode inner_mode = GET_MODE (reg);
827 poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
828 return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
829 && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
830 && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
831 reg_words, UNITS_PER_WORD));
832 }
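
/* Editorial example, not part of the original file: on a 32-bit target
   where one floating-point register holds a whole DFmode value, a
   word-sized subreg of such a register covers two words but only one
   register, so the function above returns true; a DImode value held in
   two word-sized registers has matching word and register counts and
   returns false. */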
833
834 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
835 expression. MODE is the mode that X will be used in. OUTPUT is true if
836 the function is invoked for the output part of an enclosing reload. */
837
838 static bool
839 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
840 {
841 rtx inner;
842
843 /* Only SUBREGs are problematical. */
844 if (GET_CODE (x) != SUBREG)
845 return false;
846
847 inner = SUBREG_REG (x);
848
849 /* If INNER is a constant or PLUS, then INNER will need reloading. */
850 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
851 return true;
852
853 /* If INNER is not a hard register, then INNER will not need reloading. */
854 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
855 return false;
856
857 /* If INNER is not ok for MODE, then INNER will need reloading. */
858 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
859 return true;
860
861 /* If this is for an output, and the outer part is a word or smaller,
862 INNER is larger than a word and the number of registers in INNER is
863 not the same as the number of words in INNER, then INNER will need
864 reloading (with an in-out reload). */
865 return output && complex_word_subreg_p (mode, inner);
866 }
867
868 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
869 requiring an extra reload register. The caller has already found that
870 IN contains some reference to REGNO, so check that we can produce the
871 new value in a single step. E.g. if we have
872 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
873 instruction that adds one to a register, this should succeed.
874 However, if we have something like
875 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
876 needs to be loaded into a register first, we need a separate reload
877 register.
878 Such PLUS reloads are generated by find_reloads_address_part.
879 The out-of-range PLUS expressions are usually introduced in the instruction
880 patterns by register elimination and substituting pseudos without a home
881 by their function-invariant equivalences. */
882 static int
883 can_reload_into (rtx in, int regno, machine_mode mode)
884 {
885 rtx dst;
886 rtx_insn *test_insn;
887 int r = 0;
888 struct recog_data_d save_recog_data;
889
890 /* For matching constraints, we often get notional input reloads where
891 we want to use the original register as the reload register. I.e.
892 technically this is a non-optional input-output reload, but IN is
893 already a valid register, and has been chosen as the reload register.
894 Speed this up, since it trivially works. */
895 if (REG_P (in))
896 return 1;
897
898 /* To test MEMs properly, we'd have to take into account all the reloads
899 that are already scheduled, which can become quite complicated.
900 And since we've already handled address reloads for this MEM, it
901 should always succeed anyway. */
902 if (MEM_P (in))
903 return 1;
904
905 /* If we can make a simple SET insn that does the job, everything should
906 be fine. */
907 dst = gen_rtx_REG (mode, regno);
908 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
909 save_recog_data = recog_data;
910 if (recog_memoized (test_insn) >= 0)
911 {
912 extract_insn (test_insn);
913 r = constrain_operands (1, get_enabled_alternatives (test_insn));
914 }
915 recog_data = save_recog_data;
916 return r;
917 }
918
919 /* Record one reload that needs to be performed.
920 IN is an rtx saying where the data are to be found before this instruction.
921 OUT says where they must be stored after the instruction.
922 (IN is zero for data not read, and OUT is zero for data not written.)
923 INLOC and OUTLOC point to the places in the instructions where
924 IN and OUT were found.
925 If IN and OUT are both nonzero, it means the same register must be used
926 to reload both IN and OUT.
927
928 RCLASS is a register class required for the reloaded data.
929 INMODE is the machine mode that the instruction requires
930 for the reg that replaces IN and OUTMODE is likewise for OUT.
931
932 If IN is zero, then OUT's location and mode should be passed as
933 INLOC and INMODE.
934
935 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
936
937 OPTIONAL nonzero means this reload does not need to be performed:
938 it can be discarded if that is more convenient.
939
940 OPNUM and TYPE say what the purpose of this reload is.
941
942 The return value is the reload-number for this reload.
943
944 If both IN and OUT are nonzero, in some rare cases we might
945 want to make two separate reloads. (Actually we never do this now.)
946 Therefore, the reload-number for OUT is stored in
947 output_reloadnum when we return; the return value applies to IN.
948 Usually (presently always), when IN and OUT are nonzero,
949 the two reload-numbers are equal, but the caller should be careful to
950 distinguish them. */
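
/* Editorial sketch, not part of the original file: a plain input reload of
   operand 1 into class RCLASS, roughly as find_reloads makes it later in
   this file (the argument values are only illustrative):

       push_reload (recog_data.operand[1], NULL_RTX,
                    &recog_data.operand[1], (rtx *) 0,
                    rclass, GET_MODE (recog_data.operand[1]), VOIDmode,
                    0, 0, 1, RELOAD_FOR_INPUT);

   The return value indexes rld[]; output_reloadnum only matters when OUT
   is also nonzero. */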
951
952 int
953 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
954 enum reg_class rclass, machine_mode inmode,
955 machine_mode outmode, int strict_low, int optional,
956 int opnum, enum reload_type type)
957 {
958 int i;
959 int dont_share = 0;
960 int dont_remove_subreg = 0;
961 #ifdef LIMIT_RELOAD_CLASS
962 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
963 #endif
964 int secondary_in_reload = -1, secondary_out_reload = -1;
965 enum insn_code secondary_in_icode = CODE_FOR_nothing;
966 enum insn_code secondary_out_icode = CODE_FOR_nothing;
967 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
968 subreg_in_class = NO_REGS;
969
970 /* INMODE and/or OUTMODE could be VOIDmode if no mode
971 has been specified for the operand. In that case,
972 use the operand's mode as the mode to reload. */
973 if (inmode == VOIDmode && in != 0)
974 inmode = GET_MODE (in);
975 if (outmode == VOIDmode && out != 0)
976 outmode = GET_MODE (out);
977
978 /* If find_reloads and friends have so far failed to replace a pseudo
979 with its reg_equiv_constant constant, something went wrong
980 beforehand.
981 Note that it can't simply be done here if we missed it earlier
982 since the constant might need to be pushed into the literal pool
983 and the resulting memref would probably need further
984 reloading. */
985 if (in != 0 && REG_P (in))
986 {
987 int regno = REGNO (in);
988
989 gcc_assert (regno < FIRST_PSEUDO_REGISTER
990 || reg_renumber[regno] >= 0
991 || reg_equiv_constant (regno) == NULL_RTX);
992 }
993
994 /* reg_equiv_constant only contains constants which are obviously
995 not appropriate as a destination. So if we needed to replace
996 the destination pseudo with a constant, we would be in real
997 trouble. */
998 if (out != 0 && REG_P (out))
999 {
1000 int regno = REGNO (out);
1001
1002 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1003 || reg_renumber[regno] >= 0
1004 || reg_equiv_constant (regno) == NULL_RTX);
1005 }
1006
1007 /* If we have a read-write operand with an address side-effect,
1008 change either IN or OUT so the side-effect happens only once. */
1009 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1010 switch (GET_CODE (XEXP (in, 0)))
1011 {
1012 case POST_INC: case POST_DEC: case POST_MODIFY:
1013 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1014 break;
1015
1016 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1017 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1018 break;
1019
1020 default:
1021 break;
1022 }
1023
1024 /* If we are reloading a (SUBREG constant ...), really reload just the
1025 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1026 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1027 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1028 register is a pseudo, also reload the inside expression.
1029 For machines that extend byte loads, do this for any SUBREG of a pseudo
1030 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1031 M2 is an integral mode that gets extended when loaded.
1032 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1033 where either M1 is not valid for R or M2 is wider than a word but we
1034 only need one register to store an M2-sized quantity in R.
1035 (However, if OUT is nonzero, we need to reload the reg *and*
1036 the subreg, so do nothing here, and let following statement handle it.)
1037
1038 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1039 we can't handle it here because CONST_INT does not indicate a mode.
1040
1041 Similarly, we must reload the inside expression if we have a
1042 STRICT_LOW_PART (presumably, in == out in this case).
1043
1044 Also reload the inner expression if it does not require a secondary
1045 reload but the SUBREG does.
1046
1047 Finally, reload the inner expression if it is a register that is in
1048 the class whose registers cannot be referenced in a different size
1049 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1050 cannot reload just the inside since we might end up with the wrong
1051 register class. But if it is inside a STRICT_LOW_PART, we have
1052 no choice, so we hope we do get the right register class there. */
1053
1054 scalar_int_mode inner_mode;
1055 if (in != 0 && GET_CODE (in) == SUBREG
1056 && (subreg_lowpart_p (in) || strict_low)
1057 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1058 inmode, rclass)
1059 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1060 && (CONSTANT_P (SUBREG_REG (in))
1061 || GET_CODE (SUBREG_REG (in)) == PLUS
1062 || strict_low
1063 || (((REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1065 || MEM_P (SUBREG_REG (in)))
1066 && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1067 || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1068 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1069 &inner_mode)
1070 && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1071 && paradoxical_subreg_p (inmode, inner_mode)
1072 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1073 || (WORD_REGISTER_OPERATIONS
1074 && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1075 && (known_equal_after_align_down
1076 (GET_MODE_SIZE (inmode) - 1,
1077 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1,
1078 UNITS_PER_WORD)))))
1079 || (REG_P (SUBREG_REG (in))
1080 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1081 /* The case where out is nonzero
1082 is handled differently in the following statement. */
1083 && (out == 0 || subreg_lowpart_p (in))
1084 && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1085 || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1086 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1087 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1088 SUBREG_REG (in))
1089 == NO_REGS))
1090 || (REG_P (SUBREG_REG (in))
1091 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1092 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1093 GET_MODE (SUBREG_REG (in)), inmode))))
1094 {
1095 #ifdef LIMIT_RELOAD_CLASS
1096 in_subreg_loc = inloc;
1097 #endif
1098 inloc = &SUBREG_REG (in);
1099 in = *inloc;
1100
1101 if (!WORD_REGISTER_OPERATIONS
1102 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1103 && MEM_P (in))
1104 /* This is supposed to happen only for paradoxical subregs made by
1105 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1106 gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1107 GET_MODE_SIZE (inmode)));
1108
1109 inmode = GET_MODE (in);
1110 }
1111
1112 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1113 where M1 is not valid for R if it was not handled by the code above.
1114
1115 Similar issue for (SUBREG constant ...) if it was not handled by the
1116 code above. This can happen if SUBREG_BYTE != 0.
1117
1118 However, we must reload the inner reg *as well as* the subreg in
1119 that case. */
1120
1121 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1122 {
1123 if (REG_P (SUBREG_REG (in)))
1124 subreg_in_class
1125 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1126 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1127 GET_MODE (SUBREG_REG (in)),
1128 SUBREG_BYTE (in),
1129 GET_MODE (in)),
1130 REGNO (SUBREG_REG (in)));
1131 else if (CONSTANT_P (SUBREG_REG (in))
1132 || GET_CODE (SUBREG_REG (in)) == PLUS)
1133 subreg_in_class = find_valid_class_1 (inmode,
1134 GET_MODE (SUBREG_REG (in)),
1135 rclass);
1136
1137 /* This relies on the fact that emit_reload_insns outputs the
1138 instructions for input reloads of type RELOAD_OTHER in the same
1139 order as the reloads. Thus if the outer reload is also of type
1140 RELOAD_OTHER, we are guaranteed that this inner reload will be
1141 output before the outer reload. */
1142 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1143 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1144 dont_remove_subreg = 1;
1145 }
1146
1147 /* Similarly for paradoxical and problematical SUBREGs on the output.
1148 Note that there is no reason we need worry about the previous value
1149 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1150 entitled to clobber it all (except in the case of a word mode subreg
1151 or of a STRICT_LOW_PART, in that latter case the constraint should
1152 label it input-output.) */
1153 if (out != 0 && GET_CODE (out) == SUBREG
1154 && (subreg_lowpart_p (out) || strict_low)
1155 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1156 outmode, rclass)
1157 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1158 && (CONSTANT_P (SUBREG_REG (out))
1159 || strict_low
1160 || (((REG_P (SUBREG_REG (out))
1161 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1162 || MEM_P (SUBREG_REG (out)))
1163 && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1164 || (WORD_REGISTER_OPERATIONS
1165 && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1166 && (known_equal_after_align_down
1167 (GET_MODE_SIZE (outmode) - 1,
1168 GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1169 UNITS_PER_WORD)))))
1170 || (REG_P (SUBREG_REG (out))
1171 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1172 /* The case of a word mode subreg
1173 is handled differently in the following statement. */
1174 && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1175 && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1176 UNITS_PER_WORD))
1177 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1178 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1179 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1180 SUBREG_REG (out))
1181 == NO_REGS))
1182 || (REG_P (SUBREG_REG (out))
1183 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1184 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1185 GET_MODE (SUBREG_REG (out)),
1186 outmode))))
1187 {
1188 #ifdef LIMIT_RELOAD_CLASS
1189 out_subreg_loc = outloc;
1190 #endif
1191 outloc = &SUBREG_REG (out);
1192 out = *outloc;
1193 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1194 || known_le (GET_MODE_SIZE (GET_MODE (out)),
1195 GET_MODE_SIZE (outmode)));
1196 outmode = GET_MODE (out);
1197 }
1198
1199 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1200 where either M1 is not valid for R or M2 is wider than a word but we
1201 only need one register to store an M2-sized quantity in R.
1202
1203 However, we must reload the inner reg *as well as* the subreg in
1204 that case and the inner reg is an in-out reload. */
1205
1206 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1207 {
1208 enum reg_class in_out_class
1209 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1210 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1211 GET_MODE (SUBREG_REG (out)),
1212 SUBREG_BYTE (out),
1213 GET_MODE (out)),
1214 REGNO (SUBREG_REG (out)));
1215
1216 /* This relies on the fact that emit_reload_insns outputs the
1217 instructions for output reloads of type RELOAD_OTHER in reverse
1218 order of the reloads. Thus if the outer reload is also of type
1219 RELOAD_OTHER, we are guaranteed that this inner reload will be
1220 output after the outer reload. */
1221 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1222 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1223 0, 0, opnum, RELOAD_OTHER);
1224 dont_remove_subreg = 1;
1225 }
1226
1227 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1228 if (in != 0 && out != 0 && MEM_P (out)
1229 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1230 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1231 dont_share = 1;
1232
1233 /* If IN is a SUBREG of a hard register, make a new REG. This
1234 simplifies some of the cases below. */
1235
1236 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1237 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1238 && ! dont_remove_subreg)
1239 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1240
1241 /* Similarly for OUT. */
1242 if (out != 0 && GET_CODE (out) == SUBREG
1243 && REG_P (SUBREG_REG (out))
1244 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1245 && ! dont_remove_subreg)
1246 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1247
1248 /* Narrow down the class of register wanted if that is
1249 desirable on this machine for efficiency. */
1250 {
1251 reg_class_t preferred_class = rclass;
1252
1253 if (in != 0)
1254 preferred_class = targetm.preferred_reload_class (in, rclass);
1255
1256 /* Output reloads may need analogous treatment, different in detail. */
1257 if (out != 0)
1258 preferred_class
1259 = targetm.preferred_output_reload_class (out, preferred_class);
1260
1261 /* Discard what the target said if we cannot do it. */
1262 if (preferred_class != NO_REGS
1263 || (optional && type == RELOAD_FOR_OUTPUT))
1264 rclass = (enum reg_class) preferred_class;
1265 }
1266
1267 /* Make sure we use a class that can handle the actual pseudo
1268 inside any subreg. For example, on the 386, QImode regs
1269 can appear within SImode subregs. Although GENERAL_REGS
1270 can handle SImode, QImode needs a smaller class. */
1271 #ifdef LIMIT_RELOAD_CLASS
1272 if (in_subreg_loc)
1273 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1274 else if (in != 0 && GET_CODE (in) == SUBREG)
1275 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1276
1277 if (out_subreg_loc)
1278 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1279 if (out != 0 && GET_CODE (out) == SUBREG)
1280 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1281 #endif
1282
1283 /* Verify that this class is at least possible for the mode that
1284 is specified. */
1285 if (this_insn_is_asm)
1286 {
1287 machine_mode mode;
1288 if (paradoxical_subreg_p (inmode, outmode))
1289 mode = inmode;
1290 else
1291 mode = outmode;
1292 if (mode == VOIDmode)
1293 {
1294 error_for_asm (this_insn, "cannot reload integer constant "
1295 "operand in %<asm%>");
1296 mode = word_mode;
1297 if (in != 0)
1298 inmode = word_mode;
1299 if (out != 0)
1300 outmode = word_mode;
1301 }
1302 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1303 if (targetm.hard_regno_mode_ok (i, mode)
1304 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1305 break;
1306 if (i == FIRST_PSEUDO_REGISTER)
1307 {
1308 error_for_asm (this_insn, "impossible register constraint "
1309 "in %<asm%>");
1310 /* Avoid further trouble with this insn. */
1311 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1312 /* We used to continue here setting class to ALL_REGS, but it triggers
1313 a sanity check on i386 for:
1314 void foo(long double d)
1315 {
1316 asm("" :: "a" (d));
1317 }
1318 Returning zero here ought to be safe as we take care in
1319 find_reloads to not process the reloads when the instruction was
1320 replaced by USE. */
1321
1322 return 0;
1323 }
1324 }
1325
1326 /* Optional output reloads are always OK even if we have no register class,
1327 since the function of these reloads is only to have spill_reg_store etc.
1328 set, so that the storing insn can be deleted later. */
1329 gcc_assert (rclass != NO_REGS
1330 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1331
1332 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1333
1334 if (i == n_reloads)
1335 {
1336 /* See if we need a secondary reload register to move between CLASS
1337 and IN or CLASS and OUT. Get the icode and push any required reloads
1338 needed for each of them if so. */
1339
1340 if (in != 0)
1341 secondary_in_reload
1342 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1343 &secondary_in_icode, NULL);
1344 if (out != 0 && GET_CODE (out) != SCRATCH)
1345 secondary_out_reload
1346 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1347 type, &secondary_out_icode, NULL);
1348
1349 /* We found no existing reload suitable for re-use.
1350 So add an additional reload. */
1351
1352 if (subreg_in_class == NO_REGS
1353 && in != 0
1354 && (REG_P (in)
1355 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1356 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1357 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1358 /* If a memory location is needed for the copy, make one. */
1359 if (subreg_in_class != NO_REGS
1360 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1361 get_secondary_mem (in, inmode, opnum, type);
1362
1363 i = n_reloads;
1364 rld[i].in = in;
1365 rld[i].out = out;
1366 rld[i].rclass = rclass;
1367 rld[i].inmode = inmode;
1368 rld[i].outmode = outmode;
1369 rld[i].reg_rtx = 0;
1370 rld[i].optional = optional;
1371 rld[i].inc = 0;
1372 rld[i].nocombine = 0;
1373 rld[i].in_reg = inloc ? *inloc : 0;
1374 rld[i].out_reg = outloc ? *outloc : 0;
1375 rld[i].opnum = opnum;
1376 rld[i].when_needed = type;
1377 rld[i].secondary_in_reload = secondary_in_reload;
1378 rld[i].secondary_out_reload = secondary_out_reload;
1379 rld[i].secondary_in_icode = secondary_in_icode;
1380 rld[i].secondary_out_icode = secondary_out_icode;
1381 rld[i].secondary_p = 0;
1382
1383 n_reloads++;
1384
1385 if (out != 0
1386 && (REG_P (out)
1387 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1388 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1389 && (targetm.secondary_memory_needed
1390 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1391 get_secondary_mem (out, outmode, opnum, type);
1392 }
1393 else
1394 {
1395 /* We are reusing an existing reload,
1396 but we may have additional information for it.
1397 For example, we may now have both IN and OUT
1398 while the old one may have just one of them. */
1399
1400 /* The modes can be different. If they are, we want to reload in
1401 the larger mode, so that the value is valid for both modes. */
1402 if (inmode != VOIDmode
1403 && partial_subreg_p (rld[i].inmode, inmode))
1404 rld[i].inmode = inmode;
1405 if (outmode != VOIDmode
1406 && partial_subreg_p (rld[i].outmode, outmode))
1407 rld[i].outmode = outmode;
1408 if (in != 0)
1409 {
1410 rtx in_reg = inloc ? *inloc : 0;
1411 /* If we merge reloads for two distinct rtl expressions that
1412 are identical in content, there might be duplicate address
1413 reloads. Remove the extra set now, so that if we later find
1414 that we can inherit this reload, we can get rid of the
1415 address reloads altogether.
1416
1417 Do not do this if both reloads are optional since the result
1418 would be an optional reload which could potentially leave
1419 unresolved address replacements.
1420
1421 It is not sufficient to call transfer_replacements since
1422 choose_reload_regs will remove the replacements for address
1423 reloads of inherited reloads which results in the same
1424 problem. */
1425 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1426 && ! (rld[i].optional && optional))
1427 {
1428 /* We must keep the address reload with the lower operand
1429 number alive. */
1430 if (opnum > rld[i].opnum)
1431 {
1432 remove_address_replacements (in);
1433 in = rld[i].in;
1434 in_reg = rld[i].in_reg;
1435 }
1436 else
1437 remove_address_replacements (rld[i].in);
1438 }
1439 /* When emitting reloads we don't necessarily look at the in-
1440 and outmode, but also directly at the operands (in and out).
1441 So we can't simply overwrite them with whatever we have found
1442 for this (to-be-merged) reload, we have to "merge" that too.
1443 Reusing another reload already verified that we deal with the
1444 same operands, just possibly in different modes. So we
1445 overwrite the operands only when the new mode is larger.
1446 See also PR33613. */
1447 if (!rld[i].in
1448 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1449 rld[i].in = in;
1450 if (!rld[i].in_reg
1451 || (in_reg
1452 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1453 GET_MODE (in_reg))))
1454 rld[i].in_reg = in_reg;
1455 }
1456 if (out != 0)
1457 {
1458 if (!rld[i].out
1459 || (out
1460 && partial_subreg_p (GET_MODE (rld[i].out),
1461 GET_MODE (out))))
1462 rld[i].out = out;
1463 if (outloc
1464 && (!rld[i].out_reg
1465 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1466 GET_MODE (*outloc))))
1467 rld[i].out_reg = *outloc;
1468 }
1469 if (reg_class_subset_p (rclass, rld[i].rclass))
1470 rld[i].rclass = rclass;
1471 rld[i].optional &= optional;
1472 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1473 opnum, rld[i].opnum))
1474 rld[i].when_needed = RELOAD_OTHER;
1475 rld[i].opnum = MIN (rld[i].opnum, opnum);
1476 }
1477
1478 /* If the ostensible rtx being reloaded differs from the rtx found
1479 in the location to substitute, this reload is not safe to combine
1480 because we cannot reliably tell whether it appears in the insn. */
1481
1482 if (in != 0 && in != *inloc)
1483 rld[i].nocombine = 1;
1484
1485 #if 0
1486 /* This was replaced by changes in find_reloads_address_1 and the new
1487 function inc_for_reload, which go with a new meaning of reload_inc. */
1488
1489 /* If this is an IN/OUT reload in an insn that sets the CC,
1490 it must be for an autoincrement. It doesn't work to store
1491 the incremented value after the insn because that would clobber the CC.
1492 So we must do the increment of the value reloaded from:
1493 increment it, store it back, then decrement again. */
1494 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1495 {
1496 out = 0;
1497 rld[i].out = 0;
1498 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1499 /* If we did not find a nonzero amount-to-increment-by,
1500 that contradicts the belief that IN is being incremented
1501 in an address in this insn. */
1502 gcc_assert (rld[i].inc != 0);
1503 }
1504 #endif
1505
1506 /* If we will replace IN and OUT with the reload-reg,
1507 record where they are located so that substitution need
1508 not do a tree walk. */
1509
1510 if (replace_reloads)
1511 {
1512 if (inloc != 0)
1513 {
1514 struct replacement *r = &replacements[n_replacements++];
1515 r->what = i;
1516 r->where = inloc;
1517 r->mode = inmode;
1518 }
1519 if (outloc != 0 && outloc != inloc)
1520 {
1521 struct replacement *r = &replacements[n_replacements++];
1522 r->what = i;
1523 r->where = outloc;
1524 r->mode = outmode;
1525 }
1526 }
1527
1528 /* If this reload is just being introduced and it has both
1529 an incoming quantity and an outgoing quantity that are
1530 supposed to be made to match, see if either one of the two
1531 can serve as the place to reload into.
1532
1533 If one of them is acceptable, set rld[i].reg_rtx
1534 to that one. */
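/* A hypothetical case (illustration only): for a two-address insn
   (set (reg:SI 2) (neg:SI (reg:SI 100))) whose constraints require
   operand 1 to match operand 0, IN is (reg:SI 100) and OUT is hard
   reg 2.  If reg 2 is in RCLASS and not otherwise used in the insn,
   find_dummy_reload can return (reg:SI 2): the pseudo is loaded into
   reg 2 before the insn and the result is left there, so no separate
   spill register is needed.  */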
1535
1536 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1537 {
1538 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1539 inmode, outmode,
1540 rld[i].rclass, i,
1541 earlyclobber_operand_p (out));
1542
1543 /* If the outgoing register already contains the same value
1544 as the incoming one, we can dispense with loading it.
1545 The easiest way to tell the caller that is to give a phony
1546 value for the incoming operand (same as outgoing one). */
1547 if (rld[i].reg_rtx == out
1548 && (REG_P (in) || CONSTANT_P (in))
1549 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1550 static_reload_reg_p, i, inmode) != 0)
1551 rld[i].in = out;
1552 }
1553
1554 /* If this is an input reload and the operand contains a register that
1555 dies in this insn and is used nowhere else, see if it is the right class
1556 to be used for this reload. Use it if so. (This occurs most commonly
1557 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1558 this if it is also an output reload that mentions the register unless
1559 the output is a SUBREG that clobbers an entire register.
1560
1561 Note that the operand might be one of the spill regs, if it is a
1562 pseudo reg and we are in a block where spilling has not taken place.
1563 But if there is no spilling in this block, that is OK.
1564 An explicitly used hard reg cannot be a spill reg. */
1565
1566 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1567 {
1568 rtx note;
1569 int regno;
1570 machine_mode rel_mode = inmode;
1571
1572 if (out && partial_subreg_p (rel_mode, outmode))
1573 rel_mode = outmode;
1574
1575 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1576 if (REG_NOTE_KIND (note) == REG_DEAD
1577 && REG_P (XEXP (note, 0))
1578 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1579 && reg_mentioned_p (XEXP (note, 0), in)
1580 /* Check that a former pseudo is valid; see find_dummy_reload. */
1581 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1582 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1583 ORIGINAL_REGNO (XEXP (note, 0)))
1584 && REG_NREGS (XEXP (note, 0)) == 1))
1585 && ! refers_to_regno_for_reload_p (regno,
1586 end_hard_regno (rel_mode,
1587 regno),
1588 PATTERN (this_insn), inloc)
1589 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1590 /* If this is also an output reload, IN cannot be used as
1591 the reload register if it is set in this insn unless IN
1592 is also OUT. */
1593 && (out == 0 || in == out
1594 || ! hard_reg_set_here_p (regno,
1595 end_hard_regno (rel_mode, regno),
1596 PATTERN (this_insn)))
1597 /* ??? Why is this code so different from the previous?
1598 Is there any simple coherent way to describe the two together?
1599 What's going on here? */
1600 && (in != out
1601 || (GET_CODE (in) == SUBREG
1602 && (known_equal_after_align_up
1603 (GET_MODE_SIZE (GET_MODE (in)),
1604 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1605 UNITS_PER_WORD))))
1606 /* Make sure the operand fits in the reg that dies. */
1607 && known_le (GET_MODE_SIZE (rel_mode),
1608 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1609 && targetm.hard_regno_mode_ok (regno, inmode)
1610 && targetm.hard_regno_mode_ok (regno, outmode))
1611 {
1612 unsigned int offs;
1613 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1614 hard_regno_nregs (regno, outmode));
1615
1616 for (offs = 0; offs < nregs; offs++)
1617 if (fixed_regs[regno + offs]
1618 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1619 regno + offs))
1620 break;
1621
1622 if (offs == nregs
1623 && (! (refers_to_regno_for_reload_p
1624 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1625 || can_reload_into (in, regno, inmode)))
1626 {
1627 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1628 break;
1629 }
1630 }
1631 }
1632
1633 if (out)
1634 output_reloadnum = i;
1635
1636 return i;
1637 }
1638
1639 /* Record an additional place we must replace a value
1640 for which we have already recorded a reload.
1641 RELOADNUM is the value returned by push_reload
1642 when the reload was recorded.
1643 This is used in insn patterns that use match_dup. */
1644
1645 static void
1646 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1647 {
1648 if (replace_reloads)
1649 {
1650 struct replacement *r = &replacements[n_replacements++];
1651 r->what = reloadnum;
1652 r->where = loc;
1653 r->mode = mode;
1654 }
1655 }
1656
1657 /* Duplicate any replacement we have recorded to apply at
1658 location ORIG_LOC to also be performed at DUP_LOC.
1659 This is used in insn patterns that use match_dup. */
1660
1661 static void
1662 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1663 {
1664 int i, n = n_replacements;
1665
1666 for (i = 0; i < n; i++)
1667 {
1668 struct replacement *r = &replacements[i];
1669 if (r->where == orig_loc)
1670 push_replacement (dup_loc, r->what, r->mode);
1671 }
1672 }
1673 \f
1674 /* Transfer all replacements that used to be in reload FROM to be in
1675 reload TO. */
1676
1677 void
1678 transfer_replacements (int to, int from)
1679 {
1680 int i;
1681
1682 for (i = 0; i < n_replacements; i++)
1683 if (replacements[i].what == from)
1684 replacements[i].what = to;
1685 }
1686 \f
1687 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1688 or a subpart of it. If we have any replacements registered for IN_RTX,
1689 cancel the reloads that were supposed to load them.
1690 Return nonzero if we canceled any reloads. */
1691 int
1692 remove_address_replacements (rtx in_rtx)
1693 {
1694 int i, j;
1695 char reload_flags[MAX_RELOADS];
1696 int something_changed = 0;
1697
1698 memset (reload_flags, 0, sizeof reload_flags);
1699 for (i = 0, j = 0; i < n_replacements; i++)
1700 {
1701 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1702 reload_flags[replacements[i].what] |= 1;
1703 else
1704 {
1705 replacements[j++] = replacements[i];
1706 reload_flags[replacements[i].what] |= 2;
1707 }
1708 }
1709 /* Note that the following store must be done before the recursive calls. */
1710 n_replacements = j;
1711
1712 for (i = n_reloads - 1; i >= 0; i--)
1713 {
1714 if (reload_flags[i] == 1)
1715 {
1716 deallocate_reload_reg (i);
1717 remove_address_replacements (rld[i].in);
1718 rld[i].in = 0;
1719 something_changed = 1;
1720 }
1721 }
1722 return something_changed;
1723 }
1724 \f
1725 /* If there is only one output reload, and it is not for an earlyclobber
1726 operand, try to combine it with a (logically unrelated) input reload
1727 to reduce the number of reload registers needed.
1728
1729 This is safe if the input reload does not appear in
1730 the value being output-reloaded, because this implies
1731 it is not needed any more once the original insn completes.
1732
1733 If that doesn't work, see if we can use any of the registers that
1734 die in this insn as a reload register. We can if it is of the right
1735 class and does not appear in the value being output-reloaded. */
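/* A hypothetical example of such a combination: for
   (set (mem:SI A) (neg:SI (mem:SI B))) where both memory operands
   must be reloaded into registers and A does not overlap B, the
   input reload for B can absorb the output reload for A.  A single
   reload register R is loaded from B before the insn, the insn
   becomes (set (reg R) (neg:SI (reg R))), and R is stored to A
   afterwards, so one spill register serves instead of two.  */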
1736
1737 static void
1738 combine_reloads (void)
1739 {
1740 int i, regno;
1741 int output_reload = -1;
1742 int secondary_out = -1;
1743 rtx note;
1744
1745 /* Find the output reload; return unless there is exactly one
1746 and that one is mandatory. */
1747
1748 for (i = 0; i < n_reloads; i++)
1749 if (rld[i].out != 0)
1750 {
1751 if (output_reload >= 0)
1752 return;
1753 output_reload = i;
1754 }
1755
1756 if (output_reload < 0 || rld[output_reload].optional)
1757 return;
1758
1759 /* An input-output reload isn't combinable. */
1760
1761 if (rld[output_reload].in != 0)
1762 return;
1763
1764 /* If this reload is for an earlyclobber operand, we can't do anything. */
1765 if (earlyclobber_operand_p (rld[output_reload].out))
1766 return;
1767
1768 /* If there is a reload for part of the address of this operand, we would
1769 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1770 its life to the point where doing this combine would not lower the
1771 number of spill registers needed. */
1772 for (i = 0; i < n_reloads; i++)
1773 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1774 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1775 && rld[i].opnum == rld[output_reload].opnum)
1776 return;
1777
1778 /* Check each input reload; can we combine it? */
1779
1780 for (i = 0; i < n_reloads; i++)
1781 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1782 /* Life span of this reload must not extend past main insn. */
1783 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1784 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1785 && rld[i].when_needed != RELOAD_OTHER
1786 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1787 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1788 [(int) rld[output_reload].outmode])
1789 && known_eq (rld[i].inc, 0)
1790 && rld[i].reg_rtx == 0
1791 /* Don't combine two reloads with different secondary
1792 memory locations. */
1793 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1794 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1795 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1796 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1797 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1798 ? (rld[i].rclass == rld[output_reload].rclass)
1799 : (reg_class_subset_p (rld[i].rclass,
1800 rld[output_reload].rclass)
1801 || reg_class_subset_p (rld[output_reload].rclass,
1802 rld[i].rclass)))
1803 && (MATCHES (rld[i].in, rld[output_reload].out)
1804 /* Args reversed because the first arg seems to be
1805 the one that we imagine being modified
1806 while the second is the one that might be affected. */
1807 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1808 rld[i].in)
1809 /* However, if the input is a register that appears inside
1810 the output, then we also can't share.
1811 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1812 If the same reload reg is used for both reg 69 and the
1813 result to be stored in memory, then that result
1814 will clobber the address of the memory ref. */
1815 && ! (REG_P (rld[i].in)
1816 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1817 rld[output_reload].out))))
1818 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1819 rld[i].when_needed != RELOAD_FOR_INPUT)
1820 && (reg_class_size[(int) rld[i].rclass]
1821 || targetm.small_register_classes_for_mode_p (VOIDmode))
1822 /* We will allow making things slightly worse by combining an
1823 input and an output, but no worse than that. */
1824 && (rld[i].when_needed == RELOAD_FOR_INPUT
1825 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1826 {
1827 int j;
1828
1829 /* We have found a reload to combine with! */
1830 rld[i].out = rld[output_reload].out;
1831 rld[i].out_reg = rld[output_reload].out_reg;
1832 rld[i].outmode = rld[output_reload].outmode;
1833 /* Mark the old output reload as inoperative. */
1834 rld[output_reload].out = 0;
1835 /* The combined reload is needed for the entire insn. */
1836 rld[i].when_needed = RELOAD_OTHER;
1837 /* If the output reload had a secondary reload, copy it. */
1838 if (rld[output_reload].secondary_out_reload != -1)
1839 {
1840 rld[i].secondary_out_reload
1841 = rld[output_reload].secondary_out_reload;
1842 rld[i].secondary_out_icode
1843 = rld[output_reload].secondary_out_icode;
1844 }
1845
1846 /* Copy any secondary MEM. */
1847 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1848 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1849 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1850 /* If required, minimize the register class. */
1851 if (reg_class_subset_p (rld[output_reload].rclass,
1852 rld[i].rclass))
1853 rld[i].rclass = rld[output_reload].rclass;
1854
1855 /* Transfer all replacements from the old reload to the combined. */
1856 for (j = 0; j < n_replacements; j++)
1857 if (replacements[j].what == output_reload)
1858 replacements[j].what = i;
1859
1860 return;
1861 }
1862
1863 /* If this insn has only one operand that is modified or written (assumed
1864 to be the first), it must be the one corresponding to this reload. It
1865 is safe to use anything that dies in this insn for that output provided
1866 that it does not occur in the output (we already know it isn't an
1867 earlyclobber). If this is an asm insn, give up. */
1868
1869 if (INSN_CODE (this_insn) == -1)
1870 return;
1871
1872 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1873 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1874 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1875 return;
1876
1877 /* See if some hard register that dies in this insn and is not used in
1878 the output is the right class. Only works if the register we pick
1879 up can fully hold our output reload. */
1880 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1881 if (REG_NOTE_KIND (note) == REG_DEAD
1882 && REG_P (XEXP (note, 0))
1883 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1884 rld[output_reload].out)
1885 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1886 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1887 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1888 regno)
1889 && (hard_regno_nregs (regno, rld[output_reload].outmode)
1890 <= REG_NREGS (XEXP (note, 0)))
1891 /* Ensure that a secondary or tertiary reload for this output
1892 won't want this register. */
1893 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1894 || (!(TEST_HARD_REG_BIT
1895 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1896 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1897 || !(TEST_HARD_REG_BIT
1898 (reg_class_contents[(int) rld[secondary_out].rclass],
1899 regno)))))
1900 && !fixed_regs[regno]
1901 /* Check that a former pseudo is valid; see find_dummy_reload. */
1902 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1903 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1904 ORIGINAL_REGNO (XEXP (note, 0)))
1905 && REG_NREGS (XEXP (note, 0)) == 1)))
1906 {
1907 rld[output_reload].reg_rtx
1908 = gen_rtx_REG (rld[output_reload].outmode, regno);
1909 return;
1910 }
1911 }
1912 \f
1913 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1914 See if one of IN and OUT is a register that may be used;
1915 this is desirable since a spill-register won't be needed.
1916 If so, return the register rtx that proves acceptable.
1917
1918 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1919 RCLASS is the register class required for the reload.
1920
1921 If FOR_REAL is >= 0, it is the number of the reload,
1922 and in some cases when it can be discovered that OUT doesn't need
1923 to be computed, clear out rld[FOR_REAL].out.
1924
1925 If FOR_REAL is -1, this should not be done, because this call
1926 is just to see if a register can be found, not to find and install it.
1927
1928 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1929 puts an additional constraint on being able to use IN for OUT since
1930 IN must not appear elsewhere in the insn (it is assumed that IN itself
1931 is safe from the earlyclobber). */
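/* A hypothetical example: if OUT is unusable (say, part of it is a
   fixed register) but IN is a hard register of the required class
   that dies in this insn and is not written by the insn pattern,
   IN itself is returned as the reload register; the insn's result
   is produced in IN's register and stored to OUT afterwards, which
   is harmless because IN's old value is no longer needed.  */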
1932
1933 static rtx
1934 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1935 machine_mode inmode, machine_mode outmode,
1936 reg_class_t rclass, int for_real, int earlyclobber)
1937 {
1938 rtx in = real_in;
1939 rtx out = real_out;
1940 int in_offset = 0;
1941 int out_offset = 0;
1942 rtx value = 0;
1943
1944 /* If operands exceed a word, we can't use either of them
1945 unless they have the same size. */
1946 if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1947 && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1948 || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1949 return 0;
1950
1951 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1952 respectively refers to a hard register. */
1953
1954 /* Find the inside of any subregs. */
1955 while (GET_CODE (out) == SUBREG)
1956 {
1957 if (REG_P (SUBREG_REG (out))
1958 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1959 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1960 GET_MODE (SUBREG_REG (out)),
1961 SUBREG_BYTE (out),
1962 GET_MODE (out));
1963 out = SUBREG_REG (out);
1964 }
1965 while (GET_CODE (in) == SUBREG)
1966 {
1967 if (REG_P (SUBREG_REG (in))
1968 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1969 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1970 GET_MODE (SUBREG_REG (in)),
1971 SUBREG_BYTE (in),
1972 GET_MODE (in));
1973 in = SUBREG_REG (in);
1974 }
1975
1976 /* Narrow down the reg class, the same way push_reload will;
1977 otherwise we might find a dummy now, but push_reload won't. */
1978 {
1979 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1980 if (preferred_class != NO_REGS)
1981 rclass = (enum reg_class) preferred_class;
1982 }
1983
1984 /* See if OUT will do. */
1985 if (REG_P (out)
1986 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1987 {
1988 unsigned int regno = REGNO (out) + out_offset;
1989 unsigned int nwords = hard_regno_nregs (regno, outmode);
1990 rtx saved_rtx;
1991
1992 /* When we consider whether the insn uses OUT,
1993 ignore references within IN. They don't prevent us
1994 from copying IN into OUT, because those refs would
1995 move into the insn that reloads IN.
1996
1997 However, we only ignore IN in its role as this reload.
1998 If the insn uses IN elsewhere and it contains OUT,
1999 that counts. We can't be sure it's the "same" operand
2000 so it might not go through this reload.
2001
2002 We also need to avoid using OUT if it, or part of it, is a
2003 fixed register. Modifying such registers, even transiently,
2004 may have undefined effects on the machine, such as modifying
2005 the stack pointer. */
2006 saved_rtx = *inloc;
2007 *inloc = const0_rtx;
2008
2009 if (regno < FIRST_PSEUDO_REGISTER
2010 && targetm.hard_regno_mode_ok (regno, outmode)
2011 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2012 PATTERN (this_insn), outloc))
2013 {
2014 unsigned int i;
2015
2016 for (i = 0; i < nwords; i++)
2017 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2018 regno + i)
2019 || fixed_regs[regno + i])
2020 break;
2021
2022 if (i == nwords)
2023 {
2024 if (REG_P (real_out))
2025 value = real_out;
2026 else
2027 value = gen_rtx_REG (outmode, regno);
2028 }
2029 }
2030
2031 *inloc = saved_rtx;
2032 }
2033
2034 /* Consider using IN if OUT was not acceptable
2035 or if OUT dies in this insn (like the quotient in a divmod insn).
2036 We can't use IN unless it dies in this insn,
2037 which means we must know accurately which hard regs are live.
2038 Also, the result can't go in IN if IN is used within OUT,
2039 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2040 if (hard_regs_live_known
2041 && REG_P (in)
2042 && REGNO (in) < FIRST_PSEUDO_REGISTER
2043 && (value == 0
2044 || find_reg_note (this_insn, REG_UNUSED, real_out))
2045 && find_reg_note (this_insn, REG_DEAD, real_in)
2046 && !fixed_regs[REGNO (in)]
2047 && targetm.hard_regno_mode_ok (REGNO (in),
2048 /* The only case where out and real_out
2049 might have different modes is where
2050 real_out is a subreg, and in that
2051 case, out has a real mode. */
2052 (GET_MODE (out) != VOIDmode
2053 ? GET_MODE (out) : outmode))
2054 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2055 /* However only do this if we can be sure that this input
2056 operand doesn't correspond with an uninitialized pseudo.
2057 global can assign some hardreg to it that is the same as
2058 the one assigned to a different, also live pseudo (as it
2059 can ignore the conflict). We must never introduce writes
2060 to such hardregs, as they would clobber the other live
2061 pseudo. See PR 20973. */
2062 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2063 ORIGINAL_REGNO (in))
2064 /* Similarly, only do this if we can be sure that the death
2065 note is still valid. global can assign some hardreg to
2066 the pseudo referenced in the note and simultaneously a
2067 subword of this hardreg to a different, also live pseudo,
2068 because only another subword of the hardreg is actually
2069 used in the insn. This cannot happen if the pseudo has
2070 been assigned exactly one hardreg. See PR 33732. */
2071 && REG_NREGS (in) == 1)))
2072 {
2073 unsigned int regno = REGNO (in) + in_offset;
2074 unsigned int nwords = hard_regno_nregs (regno, inmode);
2075
2076 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2077 && ! hard_reg_set_here_p (regno, regno + nwords,
2078 PATTERN (this_insn))
2079 && (! earlyclobber
2080 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2081 PATTERN (this_insn), inloc)))
2082 {
2083 unsigned int i;
2084
2085 for (i = 0; i < nwords; i++)
2086 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2087 regno + i))
2088 break;
2089
2090 if (i == nwords)
2091 {
2092 /* If we were going to use OUT as the reload reg
2093 and changed our mind, it means OUT is a dummy that
2094 dies here. So don't bother copying value to it. */
2095 if (for_real >= 0 && value == real_out)
2096 rld[for_real].out = 0;
2097 if (REG_P (real_in))
2098 value = real_in;
2099 else
2100 value = gen_rtx_REG (inmode, regno);
2101 }
2102 }
2103 }
2104
2105 return value;
2106 }
2107 \f
2108 /* This page contains subroutines used mainly for determining
2109 whether the IN or an OUT of a reload can serve as the
2110 reload register. */
2111
2112 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2113
2114 int
2115 earlyclobber_operand_p (rtx x)
2116 {
2117 int i;
2118
2119 for (i = 0; i < n_earlyclobbers; i++)
2120 if (reload_earlyclobbers[i] == x)
2121 return 1;
2122
2123 return 0;
2124 }
2125
2126 /* Return 1 if expression X alters a hard reg in the range
2127 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2128 either explicitly or in the guise of a pseudo-reg allocated to one of those hard regs.
2129 X should be the body of an instruction. */
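/* A hypothetical example: hard_reg_set_here_p (2, 3, body) is 1 when
   BODY contains (set (reg:SI 2) ...), or (clobber (reg:DI 1)) on a
   target where DImode spans two registers, since that clobber also
   covers hard reg 2; a mere use of reg 2 inside a source operand
   yields 0.  */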
2130
2131 static int
2132 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2133 {
2134 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2135 {
2136 rtx op0 = SET_DEST (x);
2137
2138 while (GET_CODE (op0) == SUBREG)
2139 op0 = SUBREG_REG (op0);
2140 if (REG_P (op0))
2141 {
2142 unsigned int r = REGNO (op0);
2143
2144 /* See if this reg overlaps range under consideration. */
2145 if (r < end_regno
2146 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2147 return 1;
2148 }
2149 }
2150 else if (GET_CODE (x) == PARALLEL)
2151 {
2152 int i = XVECLEN (x, 0) - 1;
2153
2154 for (; i >= 0; i--)
2155 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2156 return 1;
2157 }
2158
2159 return 0;
2160 }
2161
2162 /* Return 1 if ADDR is a valid memory address for mode MODE
2163 in address space AS, and check that each pseudo reg has the
2164 proper kind of hard reg. */
2165
2166 int
2167 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2168 rtx addr, addr_space_t as)
2169 {
2170 #ifdef GO_IF_LEGITIMATE_ADDRESS
2171 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2172 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2173 return 0;
2174
2175 win:
2176 return 1;
2177 #else
2178 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2179 #endif
2180 }
2181 \f
2182 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2183 if they are the same hard reg, and has special hacks for
2184 autoincrement and autodecrement.
2185 This is specifically intended for find_reloads to use
2186 in determining whether two operands match.
2187 X is the operand whose number is the lower of the two.
2188
2189 The value is 2 if Y contains a pre-increment that matches
2190 a non-incrementing address in X. */
2191
2192 /* ??? To be completely correct, we should arrange to pass
2193 for X the output operand and for Y the input operand.
2194 For now, we assume that the output operand has the lower number
2195 because that is natural in (SET output (... input ...)). */
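/* Hypothetical examples, assuming a target where the subreg below
   selects hard reg 4: operands_match_p ((reg:SI 4),
   (subreg:SI (reg:DI 4) 0)) is 1, and
   operands_match_p ((mem:SI (reg:SI 3)),
   (mem:SI (pre_inc:SI (reg:SI 3)))) is 2, the special return value
   described above.  */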
2196
2197 int
2198 operands_match_p (rtx x, rtx y)
2199 {
2200 int i;
2201 RTX_CODE code = GET_CODE (x);
2202 const char *fmt;
2203 int success_2;
2204
2205 if (x == y)
2206 return 1;
2207 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2208 && (REG_P (y) || (GET_CODE (y) == SUBREG
2209 && REG_P (SUBREG_REG (y)))))
2210 {
2211 int j;
2212
2213 if (code == SUBREG)
2214 {
2215 i = REGNO (SUBREG_REG (x));
2216 if (i >= FIRST_PSEUDO_REGISTER)
2217 goto slow;
2218 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2219 GET_MODE (SUBREG_REG (x)),
2220 SUBREG_BYTE (x),
2221 GET_MODE (x));
2222 }
2223 else
2224 i = REGNO (x);
2225
2226 if (GET_CODE (y) == SUBREG)
2227 {
2228 j = REGNO (SUBREG_REG (y));
2229 if (j >= FIRST_PSEUDO_REGISTER)
2230 goto slow;
2231 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2232 GET_MODE (SUBREG_REG (y)),
2233 SUBREG_BYTE (y),
2234 GET_MODE (y));
2235 }
2236 else
2237 j = REGNO (y);
2238
2239 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2240 multiple hard register group of scalar integer registers, so that
2241 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2242 register. */
2243 scalar_int_mode xmode;
2244 if (REG_WORDS_BIG_ENDIAN
2245 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2246 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2247 && i < FIRST_PSEUDO_REGISTER)
2248 i += hard_regno_nregs (i, xmode) - 1;
2249 scalar_int_mode ymode;
2250 if (REG_WORDS_BIG_ENDIAN
2251 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2252 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2253 && j < FIRST_PSEUDO_REGISTER)
2254 j += hard_regno_nregs (j, ymode) - 1;
2255
2256 return i == j;
2257 }
2258 /* If two operands must match, because they are really a single
2259 operand of an assembler insn, then two postincrements are invalid
2260 because the assembler insn would increment only once.
2261 On the other hand, a postincrement matches ordinary indexing
2262 if the postincrement is the output operand. */
2263 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2264 return operands_match_p (XEXP (x, 0), y);
2265 /* Two preincrements are invalid
2266 because the assembler insn would increment only once.
2267 On the other hand, a preincrement matches ordinary indexing
2268 if the preincrement is the input operand.
2269 In this case, return 2, since some callers need to do special
2270 things when this happens. */
2271 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2272 || GET_CODE (y) == PRE_MODIFY)
2273 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2274
2275 slow:
2276
2277 /* Now we have disposed of all the cases in which different rtx codes
2278 can match. */
2279 if (code != GET_CODE (y))
2280 return 0;
2281
2282 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2283 if (GET_MODE (x) != GET_MODE (y))
2284 return 0;
2285
2286 /* MEMs referring to different address space are not equivalent. */
2287 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2288 return 0;
2289
2290 switch (code)
2291 {
2292 CASE_CONST_UNIQUE:
2293 return 0;
2294
2295 case LABEL_REF:
2296 return label_ref_label (x) == label_ref_label (y);
2297 case SYMBOL_REF:
2298 return XSTR (x, 0) == XSTR (y, 0);
2299
2300 default:
2301 break;
2302 }
2303
2304 /* Compare the elements. If any pair of corresponding elements
2305 fail to match, return 0 for the whole thing.
2306
2307 success_2 = 0;
2308 fmt = GET_RTX_FORMAT (code);
2309 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2310 {
2311 int val, j;
2312 switch (fmt[i])
2313 {
2314 case 'w':
2315 if (XWINT (x, i) != XWINT (y, i))
2316 return 0;
2317 break;
2318
2319 case 'i':
2320 if (XINT (x, i) != XINT (y, i))
2321 return 0;
2322 break;
2323
2324 case 'p':
2325 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2326 return 0;
2327 break;
2328
2329 case 'e':
2330 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2331 if (val == 0)
2332 return 0;
2333 /* If any subexpression returns 2,
2334 we should return 2 if we are successful. */
2335 if (val == 2)
2336 success_2 = 1;
2337 break;
2338
2339 case '0':
2340 break;
2341
2342 case 'E':
2343 if (XVECLEN (x, i) != XVECLEN (y, i))
2344 return 0;
2345 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2346 {
2347 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2348 if (val == 0)
2349 return 0;
2350 if (val == 2)
2351 success_2 = 1;
2352 }
2353 break;
2354
2355 /* It is believed that rtx's at this level will never
2356 contain anything but integers and other rtx's,
2357 except for within LABEL_REFs and SYMBOL_REFs. */
2358 default:
2359 gcc_unreachable ();
2360 }
2361 }
2362 return 1 + success_2;
2363 }
2364 \f
2365 /* Describe the range of registers or memory referenced by X.
2366 If X is a register, set REG_FLAG and put the first register
2367 number into START and the last plus one into END.
2368 If X is a memory reference, put a base address into BASE
2369 and a range of integer offsets into START and END.
2370 If X is pushing on the stack, we can assume it causes no trouble,
2371 so we set the SAFE field. */
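/* Hypothetical examples, assuming 4-byte SImode:
   decompose ((mem:SI (plus:SI (reg:SI fp) (const_int 8)))) gives
   base = (reg:SI fp), start = 8, end = 12;
   decompose ((reg:SI 3)) for hard reg 3 gives reg_flag = 1,
   start = 3, end = 4; a push through a PRE_DEC of the stack pointer
   is simply marked safe.  */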
2372
2373 static struct decomposition
2374 decompose (rtx x)
2375 {
2376 struct decomposition val;
2377 int all_const = 0, regno;
2378
2379 memset (&val, 0, sizeof (val));
2380
2381 switch (GET_CODE (x))
2382 {
2383 case MEM:
2384 {
2385 rtx base = NULL_RTX, offset = 0;
2386 rtx addr = XEXP (x, 0);
2387
2388 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2389 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2390 {
2391 val.base = XEXP (addr, 0);
2392 val.start = -GET_MODE_SIZE (GET_MODE (x));
2393 val.end = GET_MODE_SIZE (GET_MODE (x));
2394 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2395 return val;
2396 }
2397
2398 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2399 {
2400 if (GET_CODE (XEXP (addr, 1)) == PLUS
2401 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2402 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2403 {
2404 val.base = XEXP (addr, 0);
2405 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2406 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2407 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2408 return val;
2409 }
2410 }
2411
2412 if (GET_CODE (addr) == CONST)
2413 {
2414 addr = XEXP (addr, 0);
2415 all_const = 1;
2416 }
2417 if (GET_CODE (addr) == PLUS)
2418 {
2419 if (CONSTANT_P (XEXP (addr, 0)))
2420 {
2421 base = XEXP (addr, 1);
2422 offset = XEXP (addr, 0);
2423 }
2424 else if (CONSTANT_P (XEXP (addr, 1)))
2425 {
2426 base = XEXP (addr, 0);
2427 offset = XEXP (addr, 1);
2428 }
2429 }
2430
2431 if (offset == 0)
2432 {
2433 base = addr;
2434 offset = const0_rtx;
2435 }
2436 if (GET_CODE (offset) == CONST)
2437 offset = XEXP (offset, 0);
2438 if (GET_CODE (offset) == PLUS)
2439 {
2440 if (CONST_INT_P (XEXP (offset, 0)))
2441 {
2442 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2443 offset = XEXP (offset, 0);
2444 }
2445 else if (CONST_INT_P (XEXP (offset, 1)))
2446 {
2447 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2448 offset = XEXP (offset, 1);
2449 }
2450 else
2451 {
2452 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2453 offset = const0_rtx;
2454 }
2455 }
2456 else if (!CONST_INT_P (offset))
2457 {
2458 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2459 offset = const0_rtx;
2460 }
2461
2462 if (all_const && GET_CODE (base) == PLUS)
2463 base = gen_rtx_CONST (GET_MODE (base), base);
2464
2465 gcc_assert (CONST_INT_P (offset));
2466
2467 val.start = INTVAL (offset);
2468 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2469 val.base = base;
2470 }
2471 break;
2472
2473 case REG:
2474 val.reg_flag = 1;
2475 regno = true_regnum (x);
2476 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2477 {
2478 /* A pseudo with no hard reg. */
2479 val.start = REGNO (x);
2480 val.end = val.start + 1;
2481 }
2482 else
2483 {
2484 /* A hard reg. */
2485 val.start = regno;
2486 val.end = end_hard_regno (GET_MODE (x), regno);
2487 }
2488 break;
2489
2490 case SUBREG:
2491 if (!REG_P (SUBREG_REG (x)))
2492 /* This could be more precise, but it's good enough. */
2493 return decompose (SUBREG_REG (x));
2494 regno = true_regnum (x);
2495 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2496 return decompose (SUBREG_REG (x));
2497
2498 /* A hard reg. */
2499 val.reg_flag = 1;
2500 val.start = regno;
2501 val.end = regno + subreg_nregs (x);
2502 break;
2503
2504 case SCRATCH:
2505 /* This hasn't been assigned yet, so it can't conflict yet. */
2506 val.safe = 1;
2507 break;
2508
2509 default:
2510 gcc_assert (CONSTANT_P (x));
2511 val.safe = 1;
2512 break;
2513 }
2514 return val;
2515 }
2516
2517 /* Return 1 if altering Y will not modify the value of X.
2518 Y is also described by YDATA, which should be decompose (Y). */
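/* A hypothetical example: two 4-byte stack slots at
   (plus fp (const_int 8)) and (plus fp (const_int 16)) decompose to
   the same base with disjoint offset ranges [8,12) and [16,20), so
   immune_p reports that altering one cannot change the other.  */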
2519
2520 static int
2521 immune_p (rtx x, rtx y, struct decomposition ydata)
2522 {
2523 struct decomposition xdata;
2524
2525 if (ydata.reg_flag)
2526 /* In this case the decomposition structure contains register
2527 numbers rather than byte offsets. */
2528 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2529 ydata.end.to_constant (),
2530 x, (rtx *) 0);
2531 if (ydata.safe)
2532 return 1;
2533
2534 gcc_assert (MEM_P (y));
2535 /* If Y is memory and X is not, Y can't affect X. */
2536 if (!MEM_P (x))
2537 return 1;
2538
2539 xdata = decompose (x);
2540
2541 if (! rtx_equal_p (xdata.base, ydata.base))
2542 {
2543 /* If bases are distinct symbolic constants, there is no overlap. */
2544 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2545 return 1;
2546 /* Constants and stack slots never overlap. */
2547 if (CONSTANT_P (xdata.base)
2548 && (ydata.base == frame_pointer_rtx
2549 || ydata.base == hard_frame_pointer_rtx
2550 || ydata.base == stack_pointer_rtx))
2551 return 1;
2552 if (CONSTANT_P (ydata.base)
2553 && (xdata.base == frame_pointer_rtx
2554 || xdata.base == hard_frame_pointer_rtx
2555 || xdata.base == stack_pointer_rtx))
2556 return 1;
2557 /* If either base is variable, we don't know anything. */
2558 return 0;
2559 }
2560
2561 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2562 }
2563
2564 /* Similar, but calls decompose. */
2565
2566 int
2567 safe_from_earlyclobber (rtx op, rtx clobber)
2568 {
2569 struct decomposition early_data;
2570
2571 early_data = decompose (clobber);
2572 return immune_p (op, clobber, early_data);
2573 }
2574 \f
2575 /* Main entry point of this file: search the body of INSN
2576 for values that need reloading and record them with push_reload.
2577 REPLACE nonzero means record also where the values occur
2578 so that subst_reloads can be used.
2579
2580 IND_LEVELS says how many levels of indirection are supported by this
2581 machine; a value of zero means that a memory reference is not a valid
2582 memory address.
2583
2584 LIVE_KNOWN says we have valid information about which hard
2585 regs are live at each point in the program; this is true when
2586 we are called from global_alloc but false when stupid register
2587 allocation has been done.
2588
2589 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2590 an element is nonnegative if that reg has been commandeered for reloading into.
2591 It is copied into STATIC_RELOAD_REG_P and referenced from there
2592 by various subroutines.
2593
2594 Return TRUE if some operands need to be changed, because of swapping
2595 commutative operands, reg_equiv_address substitution, or whatever. */
2596
2597 int
2598 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2599 short *reload_reg_p)
2600 {
2601 int insn_code_number;
2602 int i, j;
2603 int noperands;
2604 /* These start out as the constraints for the insn
2605 and they are chewed up as we consider alternatives. */
2606 const char *constraints[MAX_RECOG_OPERANDS];
2607 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2608 a register. */
2609 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2610 char pref_or_nothing[MAX_RECOG_OPERANDS];
2611 /* Nonzero for a MEM operand whose entire address needs a reload.
2612 May be -1 to indicate the entire address may or may not need a reload. */
2613 int address_reloaded[MAX_RECOG_OPERANDS];
2614 /* Nonzero for an address operand that needs to be completely reloaded.
2615 May be -1 to indicate the entire operand may or may not need a reload. */
2616 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2617 /* Value of enum reload_type to use for operand. */
2618 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2619 /* Value of enum reload_type to use within address of operand. */
2620 enum reload_type address_type[MAX_RECOG_OPERANDS];
2621 /* Save the usage of each operand. */
2622 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2623 int no_input_reloads = 0, no_output_reloads = 0;
2624 int n_alternatives;
2625 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2626 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2627 char this_alternative_win[MAX_RECOG_OPERANDS];
2628 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2629 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2630 int this_alternative_matches[MAX_RECOG_OPERANDS];
2631 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2632 int this_alternative_number;
2633 int goal_alternative_number = 0;
2634 int operand_reloadnum[MAX_RECOG_OPERANDS];
2635 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2636 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2637 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2638 char goal_alternative_win[MAX_RECOG_OPERANDS];
2639 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2640 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2641 int goal_alternative_swapped;
2642 int best;
2643 int commutative;
2644 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2645 rtx substed_operand[MAX_RECOG_OPERANDS];
2646 rtx body = PATTERN (insn);
2647 rtx set = single_set (insn);
2648 int goal_earlyclobber = 0, this_earlyclobber;
2649 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2650 int retval = 0;
2651
2652 this_insn = insn;
2653 n_reloads = 0;
2654 n_replacements = 0;
2655 n_earlyclobbers = 0;
2656 replace_reloads = replace;
2657 hard_regs_live_known = live_known;
2658 static_reload_reg_p = reload_reg_p;
2659
2660 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2661 neither are insns that SET cc0. Insns that use CC0 are not allowed
2662 to have any input reloads. */
2663 if (JUMP_P (insn) || CALL_P (insn))
2664 no_output_reloads = 1;
2665
2666 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2667 no_input_reloads = 1;
2668 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2669 no_output_reloads = 1;
2670
2671 /* The eliminated forms of any secondary memory locations are per-insn, so
2672 clear them out here. */
2673
2674 if (secondary_memlocs_elim_used)
2675 {
2676 memset (secondary_memlocs_elim, 0,
2677 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2678 secondary_memlocs_elim_used = 0;
2679 }
2680
2681 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2682 is cheap to move between them. If it is not, there may not be an insn
2683 to do the copy, so we may need a reload. */
2684 if (GET_CODE (body) == SET
2685 && REG_P (SET_DEST (body))
2686 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2687 && REG_P (SET_SRC (body))
2688 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2689 && register_move_cost (GET_MODE (SET_SRC (body)),
2690 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2691 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2692 return 0;
2693
2694 extract_insn (insn);
2695
2696 noperands = reload_n_operands = recog_data.n_operands;
2697 n_alternatives = recog_data.n_alternatives;
2698
2699 /* Just return "no reloads" if insn has no operands with constraints. */
2700 if (noperands == 0 || n_alternatives == 0)
2701 return 0;
2702
2703 insn_code_number = INSN_CODE (insn);
2704 this_insn_is_asm = insn_code_number < 0;
2705
2706 memcpy (operand_mode, recog_data.operand_mode,
2707 noperands * sizeof (machine_mode));
2708 memcpy (constraints, recog_data.constraints,
2709 noperands * sizeof (const char *));
2710
2711 commutative = -1;
2712
2713 /* If we will need to know, later, whether some pair of operands
2714 are the same, we must compare them now and save the result.
2715 Reloading the base and index registers will clobber them
2716 and afterward they will fail to match. */
2717
2718 for (i = 0; i < noperands; i++)
2719 {
2720 const char *p;
2721 int c;
2722 char *end;
2723
2724 substed_operand[i] = recog_data.operand[i];
2725 p = constraints[i];
2726
2727 modified[i] = RELOAD_READ;
2728
2729 /* Scan this operand's constraint to see if it is an output operand,
2730 an in-out operand, is commutative, or should match another. */
2731
2732 while ((c = *p))
2733 {
2734 p += CONSTRAINT_LEN (c, p);
2735 switch (c)
2736 {
2737 case '=':
2738 modified[i] = RELOAD_WRITE;
2739 break;
2740 case '+':
2741 modified[i] = RELOAD_READ_WRITE;
2742 break;
2743 case '%':
2744 {
2745 /* The last operand should not be marked commutative. */
2746 gcc_assert (i != noperands - 1);
2747
2748 /* We currently only support one commutative pair of
2749 operands. Some existing asm code currently uses more
2750 than one pair. Previously, that would usually work,
2751 but sometimes it would crash the compiler. We
2752 continue supporting that case as well as we can by
2753 silently ignoring all but the first pair. In the
2754 future we may handle it correctly. */
2755 if (commutative < 0)
2756 commutative = i;
2757 else
2758 gcc_assert (this_insn_is_asm);
2759 }
2760 break;
2761 /* Use of ISDIGIT is tempting here, but it may get expensive because
2762 of locale support we don't want. */
2763 case '0': case '1': case '2': case '3': case '4':
2764 case '5': case '6': case '7': case '8': case '9':
2765 {
2766 c = strtoul (p - 1, &end, 10);
2767 p = end;
2768
2769 operands_match[c][i]
2770 = operands_match_p (recog_data.operand[c],
2771 recog_data.operand[i]);
2772
2773 /* An operand may not match itself. */
2774 gcc_assert (c != i);
2775
2776 /* If C can be commuted with C+1, and C might need to match I,
2777 then C+1 might also need to match I. */
2778 if (commutative >= 0)
2779 {
2780 if (c == commutative || c == commutative + 1)
2781 {
2782 int other = c + (c == commutative ? 1 : -1);
2783 operands_match[other][i]
2784 = operands_match_p (recog_data.operand[other],
2785 recog_data.operand[i]);
2786 }
2787 if (i == commutative || i == commutative + 1)
2788 {
2789 int other = i + (i == commutative ? 1 : -1);
2790 operands_match[c][other]
2791 = operands_match_p (recog_data.operand[c],
2792 recog_data.operand[other]);
2793 }
2794 /* Note that C is supposed to be less than I.
2795 No need to consider altering both C and I because in
2796 that case we would alter one into the other. */
2797 }
2798 }
2799 }
2800 }
2801 }
2802
2803 /* Examine each operand that is a memory reference or memory address
2804 and reload parts of the addresses into index registers.
2805 Also here any references to pseudo regs that didn't get hard regs
2806 but are equivalent to constants get replaced in the insn itself
2807 with those constants. Nobody will ever see them again.
2808
2809 Finally, set up the preferred classes of each operand. */
2810
2811 for (i = 0; i < noperands; i++)
2812 {
2813 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2814
2815 address_reloaded[i] = 0;
2816 address_operand_reloaded[i] = 0;
2817 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2818 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2819 : RELOAD_OTHER);
2820 address_type[i]
2821 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2822 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2823 : RELOAD_OTHER);
2824
2825 if (*constraints[i] == 0)
2826 /* Ignore things like match_operator operands. */
2827 ;
2828 else if (insn_extra_address_constraint
2829 (lookup_constraint (constraints[i])))
2830 {
2831 address_operand_reloaded[i]
2832 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2833 recog_data.operand[i],
2834 recog_data.operand_loc[i],
2835 i, operand_type[i], ind_levels, insn);
2836
2837 /* If we now have a simple operand where we used to have a
2838 PLUS or MULT, re-recognize and try again. */
2839 if ((OBJECT_P (*recog_data.operand_loc[i])
2840 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2841 && (GET_CODE (recog_data.operand[i]) == MULT
2842 || GET_CODE (recog_data.operand[i]) == PLUS))
2843 {
2844 INSN_CODE (insn) = -1;
2845 retval = find_reloads (insn, replace, ind_levels, live_known,
2846 reload_reg_p);
2847 return retval;
2848 }
2849
2850 recog_data.operand[i] = *recog_data.operand_loc[i];
2851 substed_operand[i] = recog_data.operand[i];
2852
2853 /* Address operands are reloaded in their existing mode,
2854 no matter what is specified in the machine description. */
2855 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2856
2857 /* If the address is a single CONST_INT, pick address mode
2858 instead; otherwise we will later not know in which mode
2859 the reload should be performed. */
2860 if (operand_mode[i] == VOIDmode)
2861 operand_mode[i] = Pmode;
2862
2863 }
2864 else if (code == MEM)
2865 {
2866 address_reloaded[i]
2867 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2868 recog_data.operand_loc[i],
2869 XEXP (recog_data.operand[i], 0),
2870 &XEXP (recog_data.operand[i], 0),
2871 i, address_type[i], ind_levels, insn);
2872 recog_data.operand[i] = *recog_data.operand_loc[i];
2873 substed_operand[i] = recog_data.operand[i];
2874 }
2875 else if (code == SUBREG)
2876 {
2877 rtx reg = SUBREG_REG (recog_data.operand[i]);
2878 rtx op
2879 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2880 ind_levels,
2881 set != 0
2882 && &SET_DEST (set) == recog_data.operand_loc[i],
2883 insn,
2884 &address_reloaded[i]);
2885
2886 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2887 that didn't get a hard register, emit a USE with a REG_EQUAL
2888 note in front so that we might inherit a previous, possibly
2889 wider reload. */
2890
2891 if (replace
2892 && MEM_P (op)
2893 && REG_P (reg)
2894 && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2895 GET_MODE_SIZE (GET_MODE (op)))
2896 && reg_equiv_constant (REGNO (reg)) == 0)
2897 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2898 insn),
2899 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2900
2901 substed_operand[i] = recog_data.operand[i] = op;
2902 }
2903 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2904 /* We can get a PLUS as an "operand" as a result of register
2905 elimination. See eliminate_regs and gen_reload. We handle
2906 a unary operator by reloading the operand. */
2907 substed_operand[i] = recog_data.operand[i]
2908 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2909 ind_levels, 0, insn,
2910 &address_reloaded[i]);
2911 else if (code == REG)
2912 {
2913 /* This is equivalent to calling find_reloads_toplev.
2914 The code is duplicated for speed.
2915 When we find a pseudo always equivalent to a constant,
2916 we replace it by the constant. We must be sure, however,
2917 that we don't try to replace it in the insn in which it
2918 is being set. */
2919 int regno = REGNO (recog_data.operand[i]);
2920 if (reg_equiv_constant (regno) != 0
2921 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2922 {
2923 /* Record the existing mode so that the check whether constants are
2924 allowed will work when operand_mode isn't specified. */
2925
2926 if (operand_mode[i] == VOIDmode)
2927 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2928
2929 substed_operand[i] = recog_data.operand[i]
2930 = reg_equiv_constant (regno);
2931 }
2932 if (reg_equiv_memory_loc (regno) != 0
2933 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2934 /* We need not give a valid is_set_dest argument since the case
2935 of a constant equivalence was checked above. */
2936 substed_operand[i] = recog_data.operand[i]
2937 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2938 ind_levels, 0, insn,
2939 &address_reloaded[i]);
2940 }
2941 /* If the operand is still a register (we didn't replace it with an
2942 equivalent), get the preferred class to reload it into. */
2943 code = GET_CODE (recog_data.operand[i]);
2944 preferred_class[i]
2945 = ((code == REG && REGNO (recog_data.operand[i])
2946 >= FIRST_PSEUDO_REGISTER)
2947 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2948 : NO_REGS);
2949 pref_or_nothing[i]
2950 = (code == REG
2951 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2952 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2953 }
2954
2955 /* If this is simply a copy from operand 1 to operand 0, merge the
2956 preferred classes for the operands. */
2957 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2958 && recog_data.operand[1] == SET_SRC (set))
2959 {
2960 preferred_class[0] = preferred_class[1]
2961 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2962 pref_or_nothing[0] |= pref_or_nothing[1];
2963 pref_or_nothing[1] |= pref_or_nothing[0];
2964 }
2965
2966 /* Now see what we need for pseudo-regs that didn't get hard regs
2967 or got the wrong kind of hard reg. For this, we must consider
2968 all the operands together against the register constraints. */
2969
2970 best = MAX_RECOG_OPERANDS * 2 + 600;
2971
2972 goal_alternative_swapped = 0;
2973
2974 /* The constraints are made of several alternatives.
2975 Each operand's constraint looks like foo,bar,... with commas
2976 separating the alternatives. The first alternatives for all
2977 operands go together, the second alternatives go together, etc.
2978
2979 First loop over alternatives. */
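/* A hypothetical example: with operand 0 constrained by "=r,m" and
   operand 1 by "m,r", alternative 0 wants operand 0 in a register
   and operand 1 in memory, while alternative 1 wants the reverse;
   the loop below costs the reloads each alternative would need and
   remembers the cheapest one as the goal alternative.  */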
2980
2981 alternative_mask enabled = get_enabled_alternatives (insn);
2982 for (this_alternative_number = 0;
2983 this_alternative_number < n_alternatives;
2984 this_alternative_number++)
2985 {
2986 int swapped;
2987
2988 if (!TEST_BIT (enabled, this_alternative_number))
2989 {
2990 int i;
2991
2992 for (i = 0; i < recog_data.n_operands; i++)
2993 constraints[i] = skip_alternative (constraints[i]);
2994
2995 continue;
2996 }
2997
2998 /* If insn is commutative (it's safe to exchange a certain pair
2999 of operands) then we need to try each alternative twice, the
3000 second time matching those two operands as if we had
3001 exchanged them. To do this, really exchange them in
3002 operands. */
3003 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3004 {
3005 /* Loop over operands for one constraint alternative. */
3006 /* LOSERS counts those that don't fit this alternative
3007 and would require loading. */
3008 int losers = 0;
3009 /* BAD is set to 1 if some operand can't fit this alternative
3010 even after reloading. */
3011 int bad = 0;
3012 /* REJECT is a count of how undesirable this alternative says it is
3013 if any reloading is required. If the alternative matches exactly
3014 then REJECT is ignored, but otherwise it gets this much
3015 counted against it in addition to the reloading needed. Each
3016 ? counts three times here since we want the disparaging caused by
3017 a bad register class to only count 1/3 as much. */
3018 int reject = 0;
3019
3020 if (swapped)
3021 {
3022 recog_data.operand[commutative] = substed_operand[commutative + 1];
3023 recog_data.operand[commutative + 1] = substed_operand[commutative];
3024 /* Swap the duplicates too. */
3025 for (i = 0; i < recog_data.n_dups; i++)
3026 if (recog_data.dup_num[i] == commutative
3027 || recog_data.dup_num[i] == commutative + 1)
3028 *recog_data.dup_loc[i]
3029 = recog_data.operand[(int) recog_data.dup_num[i]];
3030
3031 std::swap (preferred_class[commutative],
3032 preferred_class[commutative + 1]);
3033 std::swap (pref_or_nothing[commutative],
3034 pref_or_nothing[commutative + 1]);
3035 std::swap (address_reloaded[commutative],
3036 address_reloaded[commutative + 1]);
3037 }
3038
3039 this_earlyclobber = 0;
3040
3041 for (i = 0; i < noperands; i++)
3042 {
3043 const char *p = constraints[i];
3044 char *end;
3045 int len;
3046 int win = 0;
3047 int did_match = 0;
3048 /* 0 => this operand can be reloaded somehow for this alternative. */
3049 int badop = 1;
3050 /* 0 => this operand can be reloaded if the alternative allows regs. */
3051 int winreg = 0;
3052 int c;
3053 int m;
3054 rtx operand = recog_data.operand[i];
3055 int offset = 0;
3056 /* Nonzero means this is a MEM that must be reloaded into a reg
3057 regardless of what the constraint says. */
3058 int force_reload = 0;
3059 int offmemok = 0;
3060 /* Nonzero if a constant forced into memory would be OK for this
3061 operand. */
3062 int constmemok = 0;
3063 int earlyclobber = 0;
3064 enum constraint_num cn;
3065 enum reg_class cl;
3066
3067 /* If the predicate accepts a unary operator, it means that
3068 we need to reload the operand, but do not do this for
3069 match_operator and friends. */
3070 if (UNARY_P (operand) && *p != 0)
3071 operand = XEXP (operand, 0);
3072
3073 /* If the operand is a SUBREG, extract
3074 the REG or MEM (or maybe even a constant) within.
3075 (Constants can occur as a result of reg_equiv_constant.) */
3076
3077 while (GET_CODE (operand) == SUBREG)
3078 {
3079 /* Offset only matters when operand is a REG and
3080 it is a hard reg. This is because it is passed
3081 to reg_fits_class_p if it is a REG, and reg_fits_class_p
3082 returns 0 for all pseudos. */
3083 if (REG_P (SUBREG_REG (operand))
3084 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3085 {
3086 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3087 GET_MODE (SUBREG_REG (operand)),
3088 SUBREG_BYTE (operand),
3089 GET_MODE (operand)) < 0)
3090 force_reload = 1;
3091 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3092 GET_MODE (SUBREG_REG (operand)),
3093 SUBREG_BYTE (operand),
3094 GET_MODE (operand));
3095 }
3096 operand = SUBREG_REG (operand);
3097 /* Force reload if this is a constant or PLUS or if there may
3098 be a problem accessing OPERAND in the outer mode. */
3099 scalar_int_mode inner_mode;
3100 if (CONSTANT_P (operand)
3101 || GET_CODE (operand) == PLUS
3102 /* We must force a reload of paradoxical SUBREGs
3103 of a MEM because the alignment of the inner value
3104 may not be enough to do the outer reference. On
3105 big-endian machines, it may also reference outside
3106 the object.
3107
3108 On machines that extend byte operations and we have a
3109 SUBREG where both the inner and outer modes are no wider
3110 than a word and the inner mode is narrower, is integral,
3111 and gets extended when loaded from memory, combine.c has
3112 made assumptions about the behavior of the machine in such a
3113 register access. If the data is, in fact, in memory, we
3114 must always load using the size assumed to be in the
3115 register and let the insn do the different-sized
3116 accesses.
3117
3118 This is doubly true if WORD_REGISTER_OPERATIONS. In
3119 this case eliminate_regs has left non-paradoxical
3120 subregs for push_reload to see. Make sure it does
3121 by forcing the reload.
3122
3123 ??? When is it right at this stage to have a subreg
3124 of a mem that is _not_ to be handled specially? IMO
3125 those should have been reduced to just a mem. */
3126 || ((MEM_P (operand)
3127 || (REG_P (operand)
3128 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3129 && (WORD_REGISTER_OPERATIONS
3130 || (((maybe_lt
3131 (GET_MODE_BITSIZE (GET_MODE (operand)),
3132 BIGGEST_ALIGNMENT))
3133 && (paradoxical_subreg_p
3134 (operand_mode[i], GET_MODE (operand)))))
3135 || BYTES_BIG_ENDIAN
3136 || (known_le (GET_MODE_SIZE (operand_mode[i]),
3137 UNITS_PER_WORD)
3138 && (is_a <scalar_int_mode>
3139 (GET_MODE (operand), &inner_mode))
3140 && (GET_MODE_SIZE (inner_mode)
3141 <= UNITS_PER_WORD)
3142 && paradoxical_subreg_p (operand_mode[i],
3143 inner_mode)
3144 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3145 )
3146 force_reload = 1;
3147 }
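/* A sketch of the LOAD_EXTEND_OP case above (hypothetical target):
   with LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, a paradoxical
   (subreg:SI (reg:QI 123) 0) of a pseudo takes the FORCE_RELOAD path,
   so that if the QImode value is really in memory it is reloaded and
   extended the way combine assumed, instead of the insn performing a
   wider access on the narrow slot.  */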
3148
3149 this_alternative[i] = NO_REGS;
3150 this_alternative_win[i] = 0;
3151 this_alternative_match_win[i] = 0;
3152 this_alternative_offmemok[i] = 0;
3153 this_alternative_earlyclobber[i] = 0;
3154 this_alternative_matches[i] = -1;
3155
3156 /* An empty constraint or empty alternative
3157 allows anything which matched the pattern. */
3158 if (*p == 0 || *p == ',')
3159 win = 1, badop = 0;
3160
3161 /* Scan this alternative's specs for this operand;
3162 set WIN if the operand fits any letter in this alternative.
3163 Otherwise, clear BADOP if this operand could
3164 fit some letter after reloads,
3165 or set WINREG if this operand could fit after reloads
3166 provided the constraint allows some registers. */
3167
3168 do
3169 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3170 {
3171 case '\0':
3172 len = 0;
3173 break;
3174 case ',':
3175 c = '\0';
3176 break;
3177
3178 case '?':
3179 reject += 6;
3180 break;
3181
3182 case '!':
3183 reject = 600;
3184 break;
3185
3186 case '#':
3187 /* Ignore rest of this alternative as far as
3188 reloading is concerned. */
3189 do
3190 p++;
3191 while (*p && *p != ',');
3192 len = 0;
3193 break;
3194
3195 case '0': case '1': case '2': case '3': case '4':
3196 case '5': case '6': case '7': case '8': case '9':
3197 m = strtoul (p, &end, 10);
3198 p = end;
3199 len = 0;
3200
3201 this_alternative_matches[i] = m;
3202 /* We are supposed to match a previous operand.
3203 If we do, we win if that one did.
3204 If we do not, count both of the operands as losers.
3205 (This is too conservative, since most of the time
3206 only a single reload insn will be needed to make
3207 the two operands win. As a result, this alternative
3208 may be rejected when it is actually desirable.) */
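/* For illustration: with COMMUTATIVE == 0 and SWAPPED set, the index
   expression 2 * commutative + 1 - m below maps operand 0 to 1 and
   operand 1 to 0, so operands_match is consulted as if the commutative
   pair had been exchanged; any other operand number passes through
   unchanged.  */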
3209 if ((swapped && (m != commutative || i != commutative + 1))
3210 /* If we are matching as if two operands were swapped,
3211 also pretend that operands_match had been computed
3212 with swapped.
3213 But if I is the second of those and C is the first,
3214 don't exchange them, because operands_match is valid
3215 only on one side of its diagonal. */
3216 ? (operands_match
3217 [(m == commutative || m == commutative + 1)
3218 ? 2 * commutative + 1 - m : m]
3219 [(i == commutative || i == commutative + 1)
3220 ? 2 * commutative + 1 - i : i])
3221 : operands_match[m][i])
3222 {
3223 /* If we are matching a non-offsettable address where an
3224 offsettable address was expected, then we must reject
3225 this combination, because we can't reload it. */
3226 if (this_alternative_offmemok[m]
3227 && MEM_P (recog_data.operand[m])
3228 && this_alternative[m] == NO_REGS
3229 && ! this_alternative_win[m])
3230 bad = 1;
3231
3232 did_match = this_alternative_win[m];
3233 }
3234 else
3235 {
3236 /* Operands don't match. */
3237 rtx value;
3238 int loc1, loc2;
3239 /* Retroactively mark the operand we had to match
3240 as a loser, if it wasn't already. */
3241 if (this_alternative_win[m])
3242 losers++;
3243 this_alternative_win[m] = 0;
3244 if (this_alternative[m] == NO_REGS)
3245 bad = 1;
3246 /* But count the pair only once in the total badness of
3247 this alternative, if the pair can be a dummy reload.
3248 The pointers in operand_loc are not swapped; swap
3249 them by hand if necessary. */
3250 if (swapped && i == commutative)
3251 loc1 = commutative + 1;
3252 else if (swapped && i == commutative + 1)
3253 loc1 = commutative;
3254 else
3255 loc1 = i;
3256 if (swapped && m == commutative)
3257 loc2 = commutative + 1;
3258 else if (swapped && m == commutative + 1)
3259 loc2 = commutative;
3260 else
3261 loc2 = m;
3262 value
3263 = find_dummy_reload (recog_data.operand[i],
3264 recog_data.operand[m],
3265 recog_data.operand_loc[loc1],
3266 recog_data.operand_loc[loc2],
3267 operand_mode[i], operand_mode[m],
3268 this_alternative[m], -1,
3269 this_alternative_earlyclobber[m]);
3270
3271 if (value != 0)
3272 losers--;
3273 }
3274 /* This can be fixed with reloads if the operand
3275 we are supposed to match can be fixed with reloads. */
3276 badop = 0;
3277 this_alternative[i] = this_alternative[m];
3278
3279 /* If we have to reload this operand and some previous
3280 operand also had to match the same thing as this
3281 operand, we don't know how to do that. So reject this
3282 alternative. */
3283 if (! did_match || force_reload)
3284 for (j = 0; j < i; j++)
3285 if (this_alternative_matches[j]
3286 == this_alternative_matches[i])
3287 {
3288 badop = 1;
3289 break;
3290 }
3291 break;
3292
3293 case 'p':
3294 /* All necessary reloads for an address_operand
3295 were handled in find_reloads_address. */
3296 this_alternative[i]
3297 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3298 ADDRESS, SCRATCH);
3299 win = 1;
3300 badop = 0;
3301 break;
3302
3303 case TARGET_MEM_CONSTRAINT:
3304 if (force_reload)
3305 break;
3306 if (MEM_P (operand)
3307 || (REG_P (operand)
3308 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3309 && reg_renumber[REGNO (operand)] < 0))
3310 win = 1;
3311 if (CONST_POOL_OK_P (operand_mode[i], operand))
3312 badop = 0;
3313 constmemok = 1;
3314 break;
3315
3316 case '<':
3317 if (MEM_P (operand)
3318 && ! address_reloaded[i]
3319 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3320 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3321 win = 1;
3322 break;
3323
3324 case '>':
3325 if (MEM_P (operand)
3326 && ! address_reloaded[i]
3327 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3328 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3329 win = 1;
3330 break;
3331
3332 /* Memory operand whose address is not offsettable. */
3333 case 'V':
3334 if (force_reload)
3335 break;
3336 if (MEM_P (operand)
3337 && ! (ind_levels ? offsettable_memref_p (operand)
3338 : offsettable_nonstrict_memref_p (operand))
3339 /* Certain mem addresses will become offsettable
3340 after they themselves are reloaded. This is important;
3341 we don't want our own handling of unoffsettables
3342 to override the handling of reg_equiv_address. */
3343 && !(REG_P (XEXP (operand, 0))
3344 && (ind_levels == 0
3345 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3346 win = 1;
3347 break;
3348
3349 /* Memory operand whose address is offsettable. */
3350 case 'o':
3351 if (force_reload)
3352 break;
3353 if ((MEM_P (operand)
3354 /* If IND_LEVELS, find_reloads_address won't reload a
3355 pseudo that didn't get a hard reg, so we have to
3356 reject that case. */
3357 && ((ind_levels ? offsettable_memref_p (operand)
3358 : offsettable_nonstrict_memref_p (operand))
3359 /* A reloaded address is offsettable because it is now
3360 just a simple register indirect. */
3361 || address_reloaded[i] == 1))
3362 || (REG_P (operand)
3363 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3364 && reg_renumber[REGNO (operand)] < 0
3365 /* If reg_equiv_address is nonzero, we will be
3366 loading it into a register; hence it will be
3367 offsettable, but we cannot say that reg_equiv_mem
3368 is offsettable without checking. */
3369 && ((reg_equiv_mem (REGNO (operand)) != 0
3370 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3371 || (reg_equiv_address (REGNO (operand)) != 0))))
3372 win = 1;
3373 if (CONST_POOL_OK_P (operand_mode[i], operand)
3374 || MEM_P (operand))
3375 badop = 0;
3376 constmemok = 1;
3377 offmemok = 1;
3378 break;
3379
3380 case '&':
3381 /* Output operand that is stored before the need for the
3382 input operands (and their index registers) is over. */
3383 earlyclobber = 1, this_earlyclobber = 1;
3384 break;
3385
3386 case 'X':
3387 force_reload = 0;
3388 win = 1;
3389 break;
3390
3391 case 'g':
3392 if (! force_reload
3393 /* A PLUS is never a valid operand, but reload can make
3394 it from a register when eliminating registers. */
3395 && GET_CODE (operand) != PLUS
3396 /* A SCRATCH is not a valid operand. */
3397 && GET_CODE (operand) != SCRATCH
3398 && (! CONSTANT_P (operand)
3399 || ! flag_pic
3400 || LEGITIMATE_PIC_OPERAND_P (operand))
3401 && (GENERAL_REGS == ALL_REGS
3402 || !REG_P (operand)
3403 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3404 && reg_renumber[REGNO (operand)] < 0)))
3405 win = 1;
3406 cl = GENERAL_REGS;
3407 goto reg;
3408
3409 default:
3410 cn = lookup_constraint (p);
3411 switch (get_constraint_type (cn))
3412 {
3413 case CT_REGISTER:
3414 cl = reg_class_for_constraint (cn);
3415 if (cl != NO_REGS)
3416 goto reg;
3417 break;
3418
3419 case CT_CONST_INT:
3420 if (CONST_INT_P (operand)
3421 && (insn_const_int_ok_for_constraint
3422 (INTVAL (operand), cn)))
3423 win = true;
3424 break;
3425
3426 case CT_MEMORY:
3427 if (force_reload)
3428 break;
3429 if (constraint_satisfied_p (operand, cn))
3430 win = 1;
3431 /* If the address was already reloaded,
3432 we win as well. */
3433 else if (MEM_P (operand) && address_reloaded[i] == 1)
3434 win = 1;
3435 /* Likewise if the address will be reloaded because
3436 reg_equiv_address is nonzero. For reg_equiv_mem
3437 we have to check. */
3438 else if (REG_P (operand)
3439 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3440 && reg_renumber[REGNO (operand)] < 0
3441 && ((reg_equiv_mem (REGNO (operand)) != 0
3442 && (constraint_satisfied_p
3443 (reg_equiv_mem (REGNO (operand)),
3444 cn)))
3445 || (reg_equiv_address (REGNO (operand))
3446 != 0)))
3447 win = 1;
3448
3449 /* If we didn't already win, we can reload
3450 constants via force_const_mem, and other
3451 MEMs by reloading the address like for 'o'. */
3452 if (CONST_POOL_OK_P (operand_mode[i], operand)
3453 || MEM_P (operand))
3454 badop = 0;
3455 constmemok = 1;
3456 offmemok = 1;
3457 break;
3458
3459 case CT_SPECIAL_MEMORY:
3460 if (force_reload)
3461 break;
3462 if (constraint_satisfied_p (operand, cn))
3463 win = 1;
3464 /* Likewise if the address will be reloaded because
3465 reg_equiv_address is nonzero. For reg_equiv_mem
3466 we have to check. */
3467 else if (REG_P (operand)
3468 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3469 && reg_renumber[REGNO (operand)] < 0
3470 && reg_equiv_mem (REGNO (operand)) != 0
3471 && (constraint_satisfied_p
3472 (reg_equiv_mem (REGNO (operand)), cn)))
3473 win = 1;
3474 break;
3475
3476 case CT_ADDRESS:
3477 if (constraint_satisfied_p (operand, cn))
3478 win = 1;
3479
3480 /* If we didn't already win, we can reload
3481 the address into a base register. */
3482 this_alternative[i]
3483 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3484 ADDRESS, SCRATCH);
3485 badop = 0;
3486 break;
3487
3488 case CT_FIXED_FORM:
3489 if (constraint_satisfied_p (operand, cn))
3490 win = 1;
3491 break;
3492 }
3493 break;
3494
3495 reg:
3496 this_alternative[i]
3497 = reg_class_subunion[this_alternative[i]][cl];
3498 if (GET_MODE (operand) == BLKmode)
3499 break;
3500 winreg = 1;
3501 if (REG_P (operand)
3502 && reg_fits_class_p (operand, this_alternative[i],
3503 offset, GET_MODE (recog_data.operand[i])))
3504 win = 1;
3505 break;
3506 }
3507 while ((p += len), c);
3508
3509 if (swapped == (commutative >= 0 ? 1 : 0))
3510 constraints[i] = p;
3511
3512 /* If this operand could be handled with a reg,
3513 and some reg is allowed, then this operand can be handled. */
3514 if (winreg && this_alternative[i] != NO_REGS
3515 && (win || !class_only_fixed_regs[this_alternative[i]]))
3516 badop = 0;
3517
3518 /* Record which operands fit this alternative. */
3519 this_alternative_earlyclobber[i] = earlyclobber;
3520 if (win && ! force_reload)
3521 this_alternative_win[i] = 1;
3522 else if (did_match && ! force_reload)
3523 this_alternative_match_win[i] = 1;
3524 else
3525 {
3526 int const_to_mem = 0;
3527
3528 this_alternative_offmemok[i] = offmemok;
3529 losers++;
3530 if (badop)
3531 bad = 1;
3532 /* Alternative loses if it has no regs for a reg operand. */
3533 if (REG_P (operand)
3534 && this_alternative[i] == NO_REGS
3535 && this_alternative_matches[i] < 0)
3536 bad = 1;
3537
3538 /* If this is a constant that is reloaded into the desired
3539 class by copying it to memory first, count that as another
3540 reload. This is consistent with other code and is
3541 required to avoid choosing another alternative when
3542 the constant is moved into memory by this function on
3543 an early reload pass. Note that the test here is
3544 precisely the same as in the code below that calls
3545 force_const_mem. */
3546 if (CONST_POOL_OK_P (operand_mode[i], operand)
3547 && ((targetm.preferred_reload_class (operand,
3548 this_alternative[i])
3549 == NO_REGS)
3550 || no_input_reloads))
3551 {
3552 const_to_mem = 1;
3553 if (this_alternative[i] != NO_REGS)
3554 losers++;
3555 }
3556
3557 /* Alternative loses if it requires a type of reload not
3558 permitted for this insn. We can always reload SCRATCH
3559 and objects with a REG_UNUSED note. */
3560 if (GET_CODE (operand) != SCRATCH
3561 && modified[i] != RELOAD_READ && no_output_reloads
3562 && ! find_reg_note (insn, REG_UNUSED, operand))
3563 bad = 1;
3564 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3565 && ! const_to_mem)
3566 bad = 1;
3567
3568 /* If we can't reload this value at all, reject this
3569 alternative. Note that we could also lose due to
3570 LIMIT_RELOAD_CLASS, but we don't check that
3571 here. */
3572
3573 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3574 {
3575 if (targetm.preferred_reload_class (operand,
3576 this_alternative[i])
3577 == NO_REGS)
3578 reject = 600;
3579
3580 if (operand_type[i] == RELOAD_FOR_OUTPUT
3581 && (targetm.preferred_output_reload_class (operand,
3582 this_alternative[i])
3583 == NO_REGS))
3584 reject = 600;
3585 }
3586
3587 /* We prefer to reload pseudos over reloading other things,
3588 since such reloads may be eliminated later.
3589 If we are reloading a SCRATCH, we won't be generating any
3590 insns, just using a register, so it is also preferred.
3591 So bump REJECT in other cases. Don't do this in the
3592 case where we are forcing a constant into memory and
3593 it will then win, since we don't want a different
3594 alternative to match in that case. */
3595 if (! (REG_P (operand)
3596 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3597 && GET_CODE (operand) != SCRATCH
3598 && ! (const_to_mem && constmemok))
3599 reject += 2;
3600
3601 /* Input reloads can be inherited more often than output
3602 reloads can be removed, so penalize output reloads. */
3603 if (operand_type[i] != RELOAD_FOR_INPUT
3604 && GET_CODE (operand) != SCRATCH)
3605 reject++;
3606 }
3607
3608 /* If this operand is a pseudo register that didn't get
3609 a hard reg and this alternative accepts some
3610 register, see if the class that we want is a subset
3611 of the preferred class for this register. If not,
3612 but it intersects that class, use the preferred class
3613 instead. If it does not intersect the preferred
3614 class, show that usage of this alternative should be
3615 discouraged; it will be discouraged more still if the
3616 register is `preferred or nothing'. We do this
3617 because it increases the chance of reusing our spill
3618 register in a later insn and avoiding a pair of
3619 memory stores and loads.
3620
3621 Don't bother with this if this alternative will
3622 accept this operand.
3623
3624 Don't do this for a multiword operand, since it is
3625 only a small win and has the risk of requiring more
3626 spill registers, which could cause a large loss.
3627
3628 Don't do this if the preferred class has only one
3629 register because we might otherwise exhaust the
3630 class. */
3631
3632 if (! win && ! did_match
3633 && this_alternative[i] != NO_REGS
3634 && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3635 && reg_class_size [(int) preferred_class[i]] > 0
3636 && ! small_register_class_p (preferred_class[i]))
3637 {
3638 if (! reg_class_subset_p (this_alternative[i],
3639 preferred_class[i]))
3640 {
3641 /* Since we don't have a way of forming the intersection,
3642 we just do something special if the preferred class
3643 is a subset of the class we have; that's the most
3644 common case anyway. */
3645 if (reg_class_subset_p (preferred_class[i],
3646 this_alternative[i]))
3647 this_alternative[i] = preferred_class[i];
3648 else
3649 reject += (2 + 2 * pref_or_nothing[i]);
3650 }
3651 }
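/* Illustrative example: if THIS_ALTERNATIVE[I] is GENERAL_REGS but the
   pseudo's preferred class is a proper subset of it (say, a target's
   multi-register index-register class), the alternative's class is
   narrowed to the preferred class above; if the preferred class is not
   a subset, REJECT instead grows by 2, or by 4 when the register is
   `preferred or nothing'.  */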
3652 }
3653
3654 /* Now see if any output operands that are marked "earlyclobber"
3655 in this alternative conflict with any input operands
3656 or any memory addresses. */
3657
3658 for (i = 0; i < noperands; i++)
3659 if (this_alternative_earlyclobber[i]
3660 && (this_alternative_win[i] || this_alternative_match_win[i]))
3661 {
3662 struct decomposition early_data;
3663
3664 early_data = decompose (recog_data.operand[i]);
3665
3666 gcc_assert (modified[i] != RELOAD_READ);
3667
3668 if (this_alternative[i] == NO_REGS)
3669 {
3670 this_alternative_earlyclobber[i] = 0;
3671 gcc_assert (this_insn_is_asm);
3672 error_for_asm (this_insn,
3673 "%<&%> constraint used with no register class");
3674 }
3675
3676 for (j = 0; j < noperands; j++)
3677 /* Is this an input operand or a memory ref? */
3678 if ((MEM_P (recog_data.operand[j])
3679 || modified[j] != RELOAD_WRITE)
3680 && j != i
3681 /* Ignore things like match_operator operands. */
3682 && !recog_data.is_operator[j]
3683 /* Don't count an input operand that is constrained to match
3684 the early clobber operand. */
3685 && ! (this_alternative_matches[j] == i
3686 && rtx_equal_p (recog_data.operand[i],
3687 recog_data.operand[j]))
3688 /* Is it altered by storing the earlyclobber operand? */
3689 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3690 early_data))
3691 {
3692 /* If the output is in a non-empty few-regs class,
3693 it's costly to reload it, so reload the input instead. */
3694 if (small_register_class_p (this_alternative[i])
3695 && (REG_P (recog_data.operand[j])
3696 || GET_CODE (recog_data.operand[j]) == SUBREG))
3697 {
3698 losers++;
3699 this_alternative_win[j] = 0;
3700 this_alternative_match_win[j] = 0;
3701 }
3702 else
3703 break;
3704 }
3705 /* If an earlyclobber operand conflicts with something,
3706 it must be reloaded, so request this and count the cost. */
3707 if (j != noperands)
3708 {
3709 losers++;
3710 this_alternative_win[i] = 0;
3711 this_alternative_match_win[j] = 0;
3712 for (j = 0; j < noperands; j++)
3713 if (this_alternative_matches[j] == i
3714 && this_alternative_match_win[j])
3715 {
3716 this_alternative_win[j] = 0;
3717 this_alternative_match_win[j] = 0;
3718 losers++;
3719 }
3720 }
3721 }
3722
3723 /* If one alternative accepts all the operands, no reload required,
3724 choose that alternative; don't consider the remaining ones. */
3725 if (losers == 0)
3726 {
3727 /* Unswap these so that they are never swapped at `finish'. */
3728 if (swapped)
3729 {
3730 recog_data.operand[commutative] = substed_operand[commutative];
3731 recog_data.operand[commutative + 1]
3732 = substed_operand[commutative + 1];
3733 }
3734 for (i = 0; i < noperands; i++)
3735 {
3736 goal_alternative_win[i] = this_alternative_win[i];
3737 goal_alternative_match_win[i] = this_alternative_match_win[i];
3738 goal_alternative[i] = this_alternative[i];
3739 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3740 goal_alternative_matches[i] = this_alternative_matches[i];
3741 goal_alternative_earlyclobber[i]
3742 = this_alternative_earlyclobber[i];
3743 }
3744 goal_alternative_number = this_alternative_number;
3745 goal_alternative_swapped = swapped;
3746 goal_earlyclobber = this_earlyclobber;
3747 goto finish;
3748 }
3749
3750 /* REJECT, set by the ! and ? constraint characters and when a register
3751 would be reloaded into a non-preferred class, discourages the use of
3752 this alternative for a reload goal. REJECT is incremented by six
3753 for each ? and two for each non-preferred class. */
3754 losers = losers * 6 + reject;
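/* Illustrative arithmetic: an alternative needing two reloads and
   containing one `?' scores 2*6 + 6 == 18 here, so it loses to an
   alternative needing the same two reloads but no `?' (12).  An
   alternative marked `!' starts with REJECT == 600, so it is normally
   chosen only when it matches without any reloading (the LOSERS == 0
   case above).  */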
3755
3756 /* If this alternative can be made to work by reloading,
3757 and it needs less reloading than the others checked so far,
3758 record it as the chosen goal for reloading. */
3759 if (! bad)
3760 {
3761 if (best > losers)
3762 {
3763 for (i = 0; i < noperands; i++)
3764 {
3765 goal_alternative[i] = this_alternative[i];
3766 goal_alternative_win[i] = this_alternative_win[i];
3767 goal_alternative_match_win[i]
3768 = this_alternative_match_win[i];
3769 goal_alternative_offmemok[i]
3770 = this_alternative_offmemok[i];
3771 goal_alternative_matches[i] = this_alternative_matches[i];
3772 goal_alternative_earlyclobber[i]
3773 = this_alternative_earlyclobber[i];
3774 }
3775 goal_alternative_swapped = swapped;
3776 best = losers;
3777 goal_alternative_number = this_alternative_number;
3778 goal_earlyclobber = this_earlyclobber;
3779 }
3780 }
3781
3782 if (swapped)
3783 {
3784 /* If the commutative operands have been swapped, swap
3785 them back in order to check the next alternative. */
3786 recog_data.operand[commutative] = substed_operand[commutative];
3787 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3788 /* Unswap the duplicates too. */
3789 for (i = 0; i < recog_data.n_dups; i++)
3790 if (recog_data.dup_num[i] == commutative
3791 || recog_data.dup_num[i] == commutative + 1)
3792 *recog_data.dup_loc[i]
3793 = recog_data.operand[(int) recog_data.dup_num[i]];
3794
3795 /* Unswap the operand related information as well. */
3796 std::swap (preferred_class[commutative],
3797 preferred_class[commutative + 1]);
3798 std::swap (pref_or_nothing[commutative],
3799 pref_or_nothing[commutative + 1]);
3800 std::swap (address_reloaded[commutative],
3801 address_reloaded[commutative + 1]);
3802 }
3803 }
3804 }
3805
3806 /* The operands don't meet the constraints.
3807 goal_alternative describes the alternative
3808 that we could reach by reloading the fewest operands.
3809 Reload so as to fit it. */
3810
3811 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3812 {
3813 /* No alternative works with reloads?? */
3814 if (insn_code_number >= 0)
3815 fatal_insn ("unable to generate reloads for:", insn);
3816 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3817 /* Avoid further trouble with this insn. */
3818 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3819 n_reloads = 0;
3820 return 0;
3821 }
3822
3823 /* Jump to `finish' from above if all operands are valid already.
3824 In that case, goal_alternative_win is all 1. */
3825 finish:
3826
3827 /* Right now, for any pair of operands I and J that are required to match,
3828 with I < J,
3829 goal_alternative_matches[J] is I.
3830 Set up goal_alternative_matched as the inverse function:
3831 goal_alternative_matched[I] = J. */
3832
3833 for (i = 0; i < noperands; i++)
3834 goal_alternative_matched[i] = -1;
3835
3836 for (i = 0; i < noperands; i++)
3837 if (! goal_alternative_win[i]
3838 && goal_alternative_matches[i] >= 0)
3839 goal_alternative_matched[goal_alternative_matches[i]] = i;
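/* For example, if operand 2 had the matching constraint "0" and did not
   win as-is, goal_alternative_matches[2] is 0 and the loop above records
   the inverse mapping goal_alternative_matched[0] == 2.  */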
3840
3841 for (i = 0; i < noperands; i++)
3842 goal_alternative_win[i] |= goal_alternative_match_win[i];
3843
3844 /* If the best alternative is with operands 1 and 2 swapped,
3845 consider them swapped before reporting the reloads. Update the
3846 operand numbers of any reloads already pushed. */
3847
3848 if (goal_alternative_swapped)
3849 {
3850 std::swap (substed_operand[commutative],
3851 substed_operand[commutative + 1]);
3852 std::swap (recog_data.operand[commutative],
3853 recog_data.operand[commutative + 1]);
3854 std::swap (*recog_data.operand_loc[commutative],
3855 *recog_data.operand_loc[commutative + 1]);
3856
3857 for (i = 0; i < recog_data.n_dups; i++)
3858 if (recog_data.dup_num[i] == commutative
3859 || recog_data.dup_num[i] == commutative + 1)
3860 *recog_data.dup_loc[i]
3861 = recog_data.operand[(int) recog_data.dup_num[i]];
3862
3863 for (i = 0; i < n_reloads; i++)
3864 {
3865 if (rld[i].opnum == commutative)
3866 rld[i].opnum = commutative + 1;
3867 else if (rld[i].opnum == commutative + 1)
3868 rld[i].opnum = commutative;
3869 }
3870 }
3871
3872 for (i = 0; i < noperands; i++)
3873 {
3874 operand_reloadnum[i] = -1;
3875
3876 /* If this is an earlyclobber operand, we need to widen the scope.
3877 The reload must remain valid from the start of the insn being
3878 reloaded until after the operand is stored into its destination.
3879 We approximate this with RELOAD_OTHER even though we know that we
3880 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3881
3882 One special case that is worth checking is when we have an
3883 output that is earlyclobber but isn't used past the insn (typically
3884 a SCRATCH). In this case, we only need have the reload live
3885 through the insn itself, but not for any of our input or output
3886 reloads.
3887 But we must not accidentally narrow the scope of an existing
3888 RELOAD_OTHER reload - leave these alone.
3889
3890 In any case, anything needed to address this operand can remain
3891 however they were previously categorized. */
3892
3893 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3894 operand_type[i]
3895 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3896 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3897 }
3898
3899 /* Any constants that aren't allowed and can't be reloaded
3900 into registers are here changed into memory references. */
3901 for (i = 0; i < noperands; i++)
3902 if (! goal_alternative_win[i])
3903 {
3904 rtx op = recog_data.operand[i];
3905 rtx subreg = NULL_RTX;
3906 rtx plus = NULL_RTX;
3907 machine_mode mode = operand_mode[i];
3908
3909 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3910 push_reload so we have to let them pass here. */
3911 if (GET_CODE (op) == SUBREG)
3912 {
3913 subreg = op;
3914 op = SUBREG_REG (op);
3915 mode = GET_MODE (op);
3916 }
3917
3918 if (GET_CODE (op) == PLUS)
3919 {
3920 plus = op;
3921 op = XEXP (op, 1);
3922 }
3923
3924 if (CONST_POOL_OK_P (mode, op)
3925 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3926 == NO_REGS)
3927 || no_input_reloads))
3928 {
3929 int this_address_reloaded;
3930 rtx tem = force_const_mem (mode, op);
3931
3932 /* If we stripped a SUBREG or a PLUS above add it back. */
3933 if (plus != NULL_RTX)
3934 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3935
3936 if (subreg != NULL_RTX)
3937 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3938
3939 this_address_reloaded = 0;
3940 substed_operand[i] = recog_data.operand[i]
3941 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3942 0, insn, &this_address_reloaded);
3943
3944 /* If the alternative accepts constant pool refs directly
3945 there will be no reload needed at all. */
3946 if (plus == NULL_RTX
3947 && subreg == NULL_RTX
3948 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3949 ? substed_operand[i]
3950 : NULL,
3951 recog_data.constraints[i],
3952 goal_alternative_number))
3953 goal_alternative_win[i] = 1;
3954 }
3955 }
3956
3957 /* Record the values of the earlyclobber operands for the caller. */
3958 if (goal_earlyclobber)
3959 for (i = 0; i < noperands; i++)
3960 if (goal_alternative_earlyclobber[i])
3961 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3962
3963 /* Now record reloads for all the operands that need them. */
3964 for (i = 0; i < noperands; i++)
3965 if (! goal_alternative_win[i])
3966 {
3967 /* Operands that match previous ones have already been handled. */
3968 if (goal_alternative_matches[i] >= 0)
3969 ;
3970 /* Handle an operand with a nonoffsettable address
3971 appearing where an offsettable address will do
3972 by reloading the address into a base register.
3973
3974 ??? We can also do this when the operand is a register and
3975 reg_equiv_mem is not offsettable, but this is a bit tricky,
3976 so we don't bother with it. It may not be worth doing. */
3977 else if (goal_alternative_matched[i] == -1
3978 && goal_alternative_offmemok[i]
3979 && MEM_P (recog_data.operand[i]))
3980 {
3981 /* If the address to be reloaded is a VOIDmode constant,
3982 use the default address mode as mode of the reload register,
3983 as would have been done by find_reloads_address. */
3984 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3985 machine_mode address_mode;
3986
3987 address_mode = get_address_mode (recog_data.operand[i]);
3988 operand_reloadnum[i]
3989 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3990 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3991 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3992 address_mode,
3993 VOIDmode, 0, 0, i, RELOAD_OTHER);
3994 rld[operand_reloadnum[i]].inc
3995 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3996
3997 /* If this operand is an output, we will have made any
3998 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3999 now we are treating part of the operand as an input, so
4000 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4001
4002 if (modified[i] == RELOAD_WRITE)
4003 {
4004 for (j = 0; j < n_reloads; j++)
4005 {
4006 if (rld[j].opnum == i)
4007 {
4008 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4009 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4010 else if (rld[j].when_needed
4011 == RELOAD_FOR_OUTADDR_ADDRESS)
4012 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4013 }
4014 }
4015 }
4016 }
4017 else if (goal_alternative_matched[i] == -1)
4018 {
4019 operand_reloadnum[i]
4020 = push_reload ((modified[i] != RELOAD_WRITE
4021 ? recog_data.operand[i] : 0),
4022 (modified[i] != RELOAD_READ
4023 ? recog_data.operand[i] : 0),
4024 (modified[i] != RELOAD_WRITE
4025 ? recog_data.operand_loc[i] : 0),
4026 (modified[i] != RELOAD_READ
4027 ? recog_data.operand_loc[i] : 0),
4028 (enum reg_class) goal_alternative[i],
4029 (modified[i] == RELOAD_WRITE
4030 ? VOIDmode : operand_mode[i]),
4031 (modified[i] == RELOAD_READ
4032 ? VOIDmode : operand_mode[i]),
4033 (insn_code_number < 0 ? 0
4034 : insn_data[insn_code_number].operand[i].strict_low),
4035 0, i, operand_type[i]);
4036 }
4037 /* In a matching pair of operands, one must be input only
4038 and the other must be output only.
4039 Pass the input operand as IN and the other as OUT. */
4040 else if (modified[i] == RELOAD_READ
4041 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4042 {
4043 operand_reloadnum[i]
4044 = push_reload (recog_data.operand[i],
4045 recog_data.operand[goal_alternative_matched[i]],
4046 recog_data.operand_loc[i],
4047 recog_data.operand_loc[goal_alternative_matched[i]],
4048 (enum reg_class) goal_alternative[i],
4049 operand_mode[i],
4050 operand_mode[goal_alternative_matched[i]],
4051 0, 0, i, RELOAD_OTHER);
4052 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4053 }
4054 else if (modified[i] == RELOAD_WRITE
4055 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4056 {
4057 operand_reloadnum[goal_alternative_matched[i]]
4058 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4059 recog_data.operand[i],
4060 recog_data.operand_loc[goal_alternative_matched[i]],
4061 recog_data.operand_loc[i],
4062 (enum reg_class) goal_alternative[i],
4063 operand_mode[goal_alternative_matched[i]],
4064 operand_mode[i],
4065 0, 0, i, RELOAD_OTHER);
4066 operand_reloadnum[i] = output_reloadnum;
4067 }
4068 else
4069 {
4070 gcc_assert (insn_code_number < 0);
4071 error_for_asm (insn, "inconsistent operand constraints "
4072 "in an %<asm%>");
4073 /* Avoid further trouble with this insn. */
4074 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4075 n_reloads = 0;
4076 return 0;
4077 }
4078 }
4079 else if (goal_alternative_matched[i] < 0
4080 && goal_alternative_matches[i] < 0
4081 && address_operand_reloaded[i] != 1
4082 && optimize)
4083 {
4084 /* For each non-matching operand that's a MEM or a pseudo-register
4085 that didn't get a hard register, make an optional reload.
4086 This may get done even if the insn needs no reloads otherwise. */
4087
4088 rtx operand = recog_data.operand[i];
4089
4090 while (GET_CODE (operand) == SUBREG)
4091 operand = SUBREG_REG (operand);
4092 if ((MEM_P (operand)
4093 || (REG_P (operand)
4094 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4095 /* If this is only for an output, the optional reload would not
4096 actually cause us to use a register now, just note that
4097 something is stored here. */
4098 && (goal_alternative[i] != NO_REGS
4099 || modified[i] == RELOAD_WRITE)
4100 && ! no_input_reloads
4101 /* An optional output reload might allow INSN to be deleted later.
4102 We mustn't make in-out reloads on insns that are not permitted
4103 to have output reloads.
4104 If this is an asm, we can't delete it; we must not even call
4105 push_reload for an optional output reload in this case,
4106 because we can't be sure that the constraint allows a register,
4107 and push_reload verifies the constraints for asms. */
4108 && (modified[i] == RELOAD_READ
4109 || (! no_output_reloads && ! this_insn_is_asm)))
4110 operand_reloadnum[i]
4111 = push_reload ((modified[i] != RELOAD_WRITE
4112 ? recog_data.operand[i] : 0),
4113 (modified[i] != RELOAD_READ
4114 ? recog_data.operand[i] : 0),
4115 (modified[i] != RELOAD_WRITE
4116 ? recog_data.operand_loc[i] : 0),
4117 (modified[i] != RELOAD_READ
4118 ? recog_data.operand_loc[i] : 0),
4119 (enum reg_class) goal_alternative[i],
4120 (modified[i] == RELOAD_WRITE
4121 ? VOIDmode : operand_mode[i]),
4122 (modified[i] == RELOAD_READ
4123 ? VOIDmode : operand_mode[i]),
4124 (insn_code_number < 0 ? 0
4125 : insn_data[insn_code_number].operand[i].strict_low),
4126 1, i, operand_type[i]);
4127 /* If a memory reference remains (either as a MEM or a pseudo that
4128 did not get a hard register), yet we can't make an optional
4129 reload, check if this is actually a pseudo register reference;
4130 we then need to emit a USE and/or a CLOBBER so that reload
4131 inheritance will do the right thing. */
4132 else if (replace
4133 && (MEM_P (operand)
4134 || (REG_P (operand)
4135 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4136 && reg_renumber [REGNO (operand)] < 0)))
4137 {
4138 operand = *recog_data.operand_loc[i];
4139
4140 while (GET_CODE (operand) == SUBREG)
4141 operand = SUBREG_REG (operand);
4142 if (REG_P (operand))
4143 {
4144 if (modified[i] != RELOAD_WRITE)
4145 /* We mark the USE with QImode so that we recognize
4146 it as one that can be safely deleted at the end
4147 of reload. */
4148 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4149 insn), QImode);
4150 if (modified[i] != RELOAD_READ)
4151 emit_insn_after (gen_clobber (operand), insn);
4152 }
4153 }
4154 }
4155 else if (goal_alternative_matches[i] >= 0
4156 && goal_alternative_win[goal_alternative_matches[i]]
4157 && modified[i] == RELOAD_READ
4158 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4159 && ! no_input_reloads && ! no_output_reloads
4160 && optimize)
4161 {
4162 /* Similarly, make an optional reload for a pair of matching
4163 objects that are in MEM or a pseudo that didn't get a hard reg. */
4164
4165 rtx operand = recog_data.operand[i];
4166
4167 while (GET_CODE (operand) == SUBREG)
4168 operand = SUBREG_REG (operand);
4169 if ((MEM_P (operand)
4170 || (REG_P (operand)
4171 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4172 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4173 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4174 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4175 recog_data.operand[i],
4176 recog_data.operand_loc[goal_alternative_matches[i]],
4177 recog_data.operand_loc[i],
4178 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4179 operand_mode[goal_alternative_matches[i]],
4180 operand_mode[i],
4181 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4182 }
4183
4184 /* Perform whatever substitutions on the operands we are supposed
4185 to make due to commutativity or replacement of registers
4186 with equivalent constants or memory slots. */
4187
4188 for (i = 0; i < noperands; i++)
4189 {
4190 /* We only do this on the last pass through reload, because it is
4191 possible for some data (like reg_equiv_address) to be changed during
4192 later passes. Moreover, we lose the opportunity to get a useful
4193 reload_{in,out}_reg when we do these replacements. */
4194
4195 if (replace)
4196 {
4197 rtx substitution = substed_operand[i];
4198
4199 *recog_data.operand_loc[i] = substitution;
4200
4201 /* If we're replacing an operand with a LABEL_REF, we need to
4202 make sure that there's a REG_LABEL_OPERAND note attached to
4203 this instruction. */
4204 if (GET_CODE (substitution) == LABEL_REF
4205 && !find_reg_note (insn, REG_LABEL_OPERAND,
4206 label_ref_label (substitution))
4207 /* For a JUMP_P, if it was a branch target it must have
4208 already been recorded as such. */
4209 && (!JUMP_P (insn)
4210 || !label_is_jump_target_p (label_ref_label (substitution),
4211 insn)))
4212 {
4213 add_reg_note (insn, REG_LABEL_OPERAND,
4214 label_ref_label (substitution));
4215 if (LABEL_P (label_ref_label (substitution)))
4216 ++LABEL_NUSES (label_ref_label (substitution));
4217 }
4218
4219 }
4220 else
4221 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4222 }
4223
4224 /* If this insn pattern contains any MATCH_DUP's, make sure that
4225 they will be substituted if the operands they match are substituted.
4226 Also do now any substitutions we already did on the operands.
4227
4228 Don't do this if we aren't making replacements because we might be
4229 propagating things allocated by frame pointer elimination into places
4230 it doesn't expect. */
4231
4232 if (insn_code_number >= 0 && replace)
4233 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4234 {
4235 int opno = recog_data.dup_num[i];
4236 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4237 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4238 }
4239
4240 #if 0
4241 /* This loses because reloading of prior insns can invalidate the equivalence
4242 (or at least find_equiv_reg isn't smart enough to find it any more),
4243 causing this insn to need more reload regs than it needed before.
4244 It may be too late to make the reload regs available.
4245 Now this optimization is done safely in choose_reload_regs. */
4246
4247 /* For each reload of a reg into some other class of reg,
4248 search for an existing equivalent reg (same value now) in the right class.
4249 We can use it as long as we don't need to change its contents. */
4250 for (i = 0; i < n_reloads; i++)
4251 if (rld[i].reg_rtx == 0
4252 && rld[i].in != 0
4253 && REG_P (rld[i].in)
4254 && rld[i].out == 0)
4255 {
4256 rld[i].reg_rtx
4257 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4258 static_reload_reg_p, 0, rld[i].inmode);
4259 /* Prevent generation of insn to load the value
4260 because the one we found already has the value. */
4261 if (rld[i].reg_rtx)
4262 rld[i].in = rld[i].reg_rtx;
4263 }
4264 #endif
4265
4266 /* If we detected an error and replaced the asm instruction with a USE,
4267 forget about the reloads. */
4268 if (GET_CODE (PATTERN (insn)) == USE
4269 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4270 n_reloads = 0;
4271
4272 /* Perhaps an output reload can be combined with another
4273 to reduce needs by one. */
4274 if (!goal_earlyclobber)
4275 combine_reloads ();
4276
4277 /* If we have a pair of reloads for parts of an address, they are reloading
4278 the same object, the operands themselves were not reloaded, and they
4279 are for two operands that are supposed to match, merge the reloads and
4280 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4281
4282 for (i = 0; i < n_reloads; i++)
4283 {
4284 int k;
4285
4286 for (j = i + 1; j < n_reloads; j++)
4287 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4288 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4289 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4290 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4291 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4292 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4293 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4294 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4295 && rtx_equal_p (rld[i].in, rld[j].in)
4296 && (operand_reloadnum[rld[i].opnum] < 0
4297 || rld[operand_reloadnum[rld[i].opnum]].optional)
4298 && (operand_reloadnum[rld[j].opnum] < 0
4299 || rld[operand_reloadnum[rld[j].opnum]].optional)
4300 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4301 || (goal_alternative_matches[rld[j].opnum]
4302 == rld[i].opnum)))
4303 {
4304 for (k = 0; k < n_replacements; k++)
4305 if (replacements[k].what == j)
4306 replacements[k].what = i;
4307
4308 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4309 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4310 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4311 else
4312 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4313 rld[j].in = 0;
4314 }
4315 }
4316
4317 /* Scan all the reloads and update their type.
4318 If a reload is for the address of an operand and we didn't reload
4319 that operand, change the type. Similarly, change the operand number
4320 of a reload when two operands match. If a reload is optional, treat it
4321 as though the operand isn't reloaded.
4322
4323 ??? This latter case is somewhat odd because if we do the optional
4324 reload, it means the object is hanging around. Thus we need only
4325 do the address reload if the optional reload was NOT done.
4326
4327 Change secondary reloads to be the address type of their operand, not
4328 the normal type.
4329
4330 If an operand's reload is now RELOAD_OTHER, change any
4331 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4332 RELOAD_FOR_OTHER_ADDRESS. */
4333
4334 for (i = 0; i < n_reloads; i++)
4335 {
4336 if (rld[i].secondary_p
4337 && rld[i].when_needed == operand_type[rld[i].opnum])
4338 rld[i].when_needed = address_type[rld[i].opnum];
4339
4340 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4342 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4343 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4344 && (operand_reloadnum[rld[i].opnum] < 0
4345 || rld[operand_reloadnum[rld[i].opnum]].optional))
4346 {
4347 /* If we have a secondary reload to go along with this reload,
4348 change its type to RELOAD_FOR_OPADDR_ADDR. */
4349
4350 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4351 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4352 && rld[i].secondary_in_reload != -1)
4353 {
4354 int secondary_in_reload = rld[i].secondary_in_reload;
4355
4356 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4357
4358 /* If there's a tertiary reload we have to change it also. */
4359 if (secondary_in_reload > 0
4360 && rld[secondary_in_reload].secondary_in_reload != -1)
4361 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4362 = RELOAD_FOR_OPADDR_ADDR;
4363 }
4364
4365 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4366 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4367 && rld[i].secondary_out_reload != -1)
4368 {
4369 int secondary_out_reload = rld[i].secondary_out_reload;
4370
4371 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4372
4373 /* If there's a tertiary reload we have to change it also. */
4374 if (secondary_out_reload
4375 && rld[secondary_out_reload].secondary_out_reload != -1)
4376 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4377 = RELOAD_FOR_OPADDR_ADDR;
4378 }
4379
4380 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4381 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4382 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4383 else
4384 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4385 }
4386
4387 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4388 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4389 && operand_reloadnum[rld[i].opnum] >= 0
4390 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4391 == RELOAD_OTHER))
4392 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4393
4394 if (goal_alternative_matches[rld[i].opnum] >= 0)
4395 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4396 }
4397
4398 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4399 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4400 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4401
4402 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4403 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4404 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4405 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4406 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4407 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4408 This is complicated by the fact that a single operand can have more
4409 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4410 choose_reload_regs without affecting code quality, and cases that
4411 actually fail are extremely rare, so it turns out to be better to fix
4412 the problem here by not generating cases that choose_reload_regs will
4413 fail for. */
4414 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4415 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4416 a single operand.
4417 We can reduce the register pressure by exploiting the fact that a
4418 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4419 does not conflict with any of them, provided it is only used for the
4420 first of the RELOAD_FOR_X_ADDRESS reloads. */
4421 {
4422 int first_op_addr_num = -2;
4423 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4424 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4425 int need_change = 0;
4426 /* We initially use first_op_addr_num and the contents of the above
4427 arrays as flags - -2 means no instance encountered, -1 means exactly
4428 one instance encountered.
4429 If more than one instance has been encountered, we store the reload
4430 number of the first reload of the kind in question; reload numbers
4431 are known to be non-negative. */
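/* Worked example (illustrative): with RELOAD_FOR_OPERAND_ADDRESS reloads
   numbered 2, 5 and 7, the downward scan below moves first_op_addr_num
   from -2 to -1 at reload 7, then to 5 at reload 5 (also setting
   NEED_CHANGE), and finally to 2 at reload 2 - the number of the first
   reload of that kind.  */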
4432 for (i = 0; i < noperands; i++)
4433 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4434 for (i = n_reloads - 1; i >= 0; i--)
4435 {
4436 switch (rld[i].when_needed)
4437 {
4438 case RELOAD_FOR_OPERAND_ADDRESS:
4439 if (++first_op_addr_num >= 0)
4440 {
4441 first_op_addr_num = i;
4442 need_change = 1;
4443 }
4444 break;
4445 case RELOAD_FOR_INPUT_ADDRESS:
4446 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4447 {
4448 first_inpaddr_num[rld[i].opnum] = i;
4449 need_change = 1;
4450 }
4451 break;
4452 case RELOAD_FOR_OUTPUT_ADDRESS:
4453 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4454 {
4455 first_outpaddr_num[rld[i].opnum] = i;
4456 need_change = 1;
4457 }
4458 break;
4459 default:
4460 break;
4461 }
4462 }
4463
4464 if (need_change)
4465 {
4466 for (i = 0; i < n_reloads; i++)
4467 {
4468 int first_num;
4469 enum reload_type type;
4470
4471 switch (rld[i].when_needed)
4472 {
4473 case RELOAD_FOR_OPADDR_ADDR:
4474 first_num = first_op_addr_num;
4475 type = RELOAD_FOR_OPERAND_ADDRESS;
4476 break;
4477 case RELOAD_FOR_INPADDR_ADDRESS:
4478 first_num = first_inpaddr_num[rld[i].opnum];
4479 type = RELOAD_FOR_INPUT_ADDRESS;
4480 break;
4481 case RELOAD_FOR_OUTADDR_ADDRESS:
4482 first_num = first_outpaddr_num[rld[i].opnum];
4483 type = RELOAD_FOR_OUTPUT_ADDRESS;
4484 break;
4485 default:
4486 continue;
4487 }
4488 if (first_num < 0)
4489 continue;
4490 else if (i > first_num)
4491 rld[i].when_needed = type;
4492 else
4493 {
4494 /* Check if the only TYPE reload that uses reload I is
4495 reload FIRST_NUM. */
4496 for (j = n_reloads - 1; j > first_num; j--)
4497 {
4498 if (rld[j].when_needed == type
4499 && (rld[i].secondary_p
4500 ? rld[j].secondary_in_reload == i
4501 : reg_mentioned_p (rld[i].in, rld[j].in)))
4502 {
4503 rld[i].when_needed = type;
4504 break;
4505 }
4506 }
4507 }
4508 }
4509 }
4510 }
4511
4512 /* See if we have any reloads that are now allowed to be merged
4513 because we've changed when the reload is needed to
4514 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4515 check for the most common cases. */
4516
4517 for (i = 0; i < n_reloads; i++)
4518 if (rld[i].in != 0 && rld[i].out == 0
4519 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4520 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4521 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4522 for (j = 0; j < n_reloads; j++)
4523 if (i != j && rld[j].in != 0 && rld[j].out == 0
4524 && rld[j].when_needed == rld[i].when_needed
4525 && MATCHES (rld[i].in, rld[j].in)
4526 && rld[i].rclass == rld[j].rclass
4527 && !rld[i].nocombine && !rld[j].nocombine
4528 && rld[i].reg_rtx == rld[j].reg_rtx)
4529 {
4530 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4531 transfer_replacements (i, j);
4532 rld[j].in = 0;
4533 }
4534
4535 /* If we made any reloads for addresses, see if they violate a
4536 "no input reloads" requirement for this insn. But loads that we
4537 do after the insn (such as for output addresses) are fine. */
4538 if (HAVE_cc0 && no_input_reloads)
4539 for (i = 0; i < n_reloads; i++)
4540 gcc_assert (rld[i].in == 0
4541 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4542 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4543
4544 /* Compute reload_mode and reload_nregs. */
4545 for (i = 0; i < n_reloads; i++)
4546 {
4547 rld[i].mode = rld[i].inmode;
4548 if (rld[i].mode == VOIDmode
4549 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4550 rld[i].mode = rld[i].outmode;
4551
4552 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4553 }
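/* Illustrative example: for an in-out reload with inmode == HImode and
   outmode == SImode, partial_subreg_p (HImode, SImode) holds, so
   rld[i].mode becomes SImode and rld[i].nregs is computed for the wider
   mode.  */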
4554
4555 /* Special case a simple move with an input reload and a
4556 destination of a hard reg: if the hard reg is ok, use it. */
4557 for (i = 0; i < n_reloads; i++)
4558 if (rld[i].when_needed == RELOAD_FOR_INPUT
4559 && GET_CODE (PATTERN (insn)) == SET
4560 && REG_P (SET_DEST (PATTERN (insn)))
4561 && (SET_SRC (PATTERN (insn)) == rld[i].in
4562 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4563 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4564 {
4565 rtx dest = SET_DEST (PATTERN (insn));
4566 unsigned int regno = REGNO (dest);
4567
4568 if (regno < FIRST_PSEUDO_REGISTER
4569 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4570 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4571 {
4572 int nr = hard_regno_nregs (regno, rld[i].mode);
4573 int ok = 1, nri;
4574
4575 for (nri = 1; nri < nr; nri ++)
4576 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4577 {
4578 ok = 0;
4579 break;
4580 }
4581
4582 if (ok)
4583 rld[i].reg_rtx = dest;
4584 }
4585 }
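/* A sketch of the case above: for (set (reg:SI 0) (reg:SI 120)), where
   hard register 0 belongs to the reload's class and is ok in rld[i].mode,
   the destination hard register itself is used as the reload register
   for pseudo 120's input reload, avoiding an extra move.  */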
4586
4587 return retval;
4588 }
4589
4590 /* Return true if alternative number ALTNUM in constraint-string
4591 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4592 MEM gives the reference if its address hasn't been fully reloaded,
4593 otherwise it is NULL. */
4594
4595 static bool
4596 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4597 const char *constraint, int altnum)
4598 {
4599 int c;
4600
4601 /* Skip alternatives before the one requested. */
4602 while (altnum > 0)
4603 {
4604 while (*constraint++ != ',')
4605 ;
4606 altnum--;
4607 }
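/* E.g. for the constraint string "r,m,o" and ALTNUM == 2, the loop above
   skips past two commas and the scan below starts at "o".  */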
4608 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4609 If one of them is present, this alternative accepts the result of
4610 passing a constant-pool reference through find_reloads_toplev.
4611
4612 The same is true of extra memory constraints if the address
4613 was reloaded into a register. However, the target may elect
4614 to disallow the original constant address, forcing it to be
4615 reloaded into a register instead. */
4616 for (; (c = *constraint) && c != ',' && c != '#';
4617 constraint += CONSTRAINT_LEN (c, constraint))
4618 {
4619 enum constraint_num cn = lookup_constraint (constraint);
4620 if (insn_extra_memory_constraint (cn)
4621 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4622 return true;
4623 }
4624 return false;
4625 }
4626 \f
4627 /* Scan X for memory references and scan the addresses for reloading.
4628 Also checks for references to "constant" regs that we want to eliminate
4629 and replaces them with the values they stand for.
4630 We may alter X destructively if it contains a reference to such.
4631 If X is just a constant reg, we return the equivalent value
4632 instead of X.
4633
4634 IND_LEVELS says how many levels of indirect addressing this machine
4635 supports.
4636
4637 OPNUM and TYPE identify the purpose of the reload.
4638
4639 IS_SET_DEST is true if X is the destination of a SET, which is not
4640 appropriate to be replaced by a constant.
4641
4642 INSN, if nonzero, is the insn in which we do the reload. It is used
4643 to determine if we may generate output reloads, and where to put USEs
4644 for pseudos that we have to replace with stack slots.
4645
4646 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4647 result of find_reloads_address. */
4648
4649 static rtx
4650 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4651 int ind_levels, int is_set_dest, rtx_insn *insn,
4652 int *address_reloaded)
4653 {
4654 RTX_CODE code = GET_CODE (x);
4655
4656 const char *fmt = GET_RTX_FORMAT (code);
4657 int i;
4658 int copied;
4659
4660 if (code == REG)
4661 {
4662 /* This code is duplicated for speed in find_reloads. */
4663 int regno = REGNO (x);
4664 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4665 x = reg_equiv_constant (regno);
4666 #if 0
4667 /* This creates (subreg (mem...)) which would cause an unnecessary
4668 reload of the mem. */
4669 else if (reg_equiv_mem (regno) != 0)
4670 x = reg_equiv_mem (regno);
4671 #endif
4672 else if (reg_equiv_memory_loc (regno)
4673 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4674 {
4675 rtx mem = make_memloc (x, regno);
4676 if (reg_equiv_address (regno)
4677 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4678 {
4679 /* If this is not a toplevel operand, find_reloads doesn't see
4680 this substitution. We have to emit a USE of the pseudo so
4681 that delete_output_reload can see it. */
4682 if (replace_reloads && recog_data.operand[opnum] != x)
4683 /* We mark the USE with QImode so that we recognize it
4684 as one that can be safely deleted at the end of
4685 reload. */
4686 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4687 QImode);
4688 x = mem;
4689 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4690 opnum, type, ind_levels, insn);
4691 if (!rtx_equal_p (x, mem))
4692 push_reg_equiv_alt_mem (regno, x);
4693 if (address_reloaded)
4694 *address_reloaded = i;
4695 }
4696 }
4697 return x;
4698 }
4699 if (code == MEM)
4700 {
4701 rtx tem = x;
4702
4703 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4704 opnum, type, ind_levels, insn);
4705 if (address_reloaded)
4706 *address_reloaded = i;
4707
4708 return tem;
4709 }
4710
4711 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4712 {
4713 /* Check for SUBREG containing a REG that's equivalent to a
4714 constant. If the constant has a known value, truncate it
4715 right now. Similarly if we are extracting a single-word of a
4716 multi-word constant. If the constant is symbolic, allow it
4717 to be substituted normally. push_reload will strip the
4718 subreg later. The constant must not be VOIDmode, because we
4719 will lose the mode of the register (this should never happen
4720 because one of the cases above should handle it). */
4721
4722 int regno = REGNO (SUBREG_REG (x));
4723 rtx tem;
4724
4725 if (regno >= FIRST_PSEUDO_REGISTER
4726 && reg_renumber[regno] < 0
4727 && reg_equiv_constant (regno) != 0)
4728 {
4729 tem =
4730 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4731 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4732 gcc_assert (tem);
4733 if (CONSTANT_P (tem)
4734 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4735 {
4736 tem = force_const_mem (GET_MODE (x), tem);
4737 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4738 &XEXP (tem, 0), opnum, type,
4739 ind_levels, insn);
4740 if (address_reloaded)
4741 *address_reloaded = i;
4742 }
4743 return tem;
4744 }
4745
4746 /* If the subreg contains a reg that will be converted to a mem,
4747 attempt to convert the whole subreg to a (narrower or wider)
4748 memory reference instead. If this succeeds, we're done --
4749 otherwise fall through to check whether the inner reg still
4750 needs address reloads anyway. */
4751
4752 if (regno >= FIRST_PSEUDO_REGISTER
4753 && reg_equiv_memory_loc (regno) != 0)
4754 {
4755 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4756 insn, address_reloaded);
4757 if (tem)
4758 return tem;
4759 }
4760 }
4761
4762 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4763 {
4764 if (fmt[i] == 'e')
4765 {
4766 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4767 ind_levels, is_set_dest, insn,
4768 address_reloaded);
4769 /* If we have replaced a reg with its equivalent memory loc -
4770 that can still be handled here e.g. if it's in a paradoxical
4771 subreg - we must make the change in a copy, rather than using
4772 a destructive change. This way, find_reloads can still elect
4773 not to do the change. */
4774 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4775 {
4776 x = shallow_copy_rtx (x);
4777 copied = 1;
4778 }
4779 XEXP (x, i) = new_part;
4780 }
4781 }
4782 return x;
4783 }
4784
4785 /* Return a mem ref for the memory equivalent of reg REGNO.
4786 This mem ref is not shared with anything. */
4787
4788 static rtx
4789 make_memloc (rtx ad, int regno)
4790 {
4791 /* We must rerun eliminate_regs, in case the elimination
4792 offsets have changed. */
4793 rtx tem
4794 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4795 0);
4796
4797 /* If TEM might contain a pseudo, we must copy it to avoid
4798 modifying it when we do the substitution for the reload. */
4799 if (rtx_varies_p (tem, 0))
4800 tem = copy_rtx (tem);
4801
4802 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4803 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4804
4805 /* Copy the result if it's still the same as the equivalence, to avoid
4806 modifying it when we do the substitution for the reload. */
4807 if (tem == reg_equiv_memory_loc (regno))
4808 tem = copy_rtx (tem);
4809 return tem;
4810 }
4811
4812 /* Returns true if AD could be turned into a valid memory reference
4813 to mode MODE in address space AS by reloading the part pointed to
4814 by PART into a register. */
4815
4816 static int
4817 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4818 addr_space_t as, rtx *part)
4819 {
4820 int retv;
4821 rtx tem = *part;
4822 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4823
4824 *part = reg;
4825 retv = memory_address_addr_space_p (mode, ad, as);
4826 *part = tem;
4827
4828 return retv;
4829 }
4830
4831 /* Record all reloads needed for handling memory address AD
4832 which appears in *LOC in a memory reference to mode MODE
4833 which itself is found in location *MEMREFLOC.
4834 Note that we take shortcuts assuming that no multi-reg machine mode
4835 occurs as part of an address.
4836
4837 OPNUM and TYPE specify the purpose of this reload.
4838
4839 IND_LEVELS says how many levels of indirect addressing this machine
4840 supports.
4841
4842 INSN, if nonzero, is the insn in which we do the reload. It is used
4843 to determine if we may generate output reloads, and where to put USEs
4844 for pseudos that we have to replace with stack slots.
4845
4846 Value is one if this address is reloaded or replaced as a whole; it is
4847 zero if the top level of this address was not reloaded or replaced, and
4848 it is -1 if it may or may not have been reloaded or replaced.
4849
4850 Note that there is no verification that the address will be valid after
4851 this routine does its work. Instead, we rely on the fact that the address
4852 was valid when reload started. So we need only undo things that reload
4853 could have broken. These are wrong register types, pseudos not allocated
4854 to a hard register, and frame pointer elimination. */
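/* Illustrative sketch (invented register numbers): if AD is a pseudo
   (reg:SI 100) with reg_equiv_constant (100) = (symbol_ref "x"), the
   symbolic address is reloaded into a base register and 1 is returned,
   meaning the whole address was replaced.  If AD is
   (plus (reg fp) (const_int 8)) and that form is already valid on the
   target, no reload is pushed and 0 is returned.  In this function, -1
   is returned only from the LEGITIMIZE_RELOAD_ADDRESS path, where the
   target may or may not have replaced the address.  */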
4855
4856 static int
4857 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4858 rtx *loc, int opnum, enum reload_type type,
4859 int ind_levels, rtx_insn *insn)
4860 {
4861 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4862 : ADDR_SPACE_GENERIC;
4863 int regno;
4864 int removed_and = 0;
4865 int op_index;
4866 rtx tem;
4867
4868 /* If the address is a register, see if it is a legitimate address and
4869 reload if not. We first handle the cases where we need not reload
4870 or where we must reload in a non-standard way. */
4871
4872 if (REG_P (ad))
4873 {
4874 regno = REGNO (ad);
4875
4876 if (reg_equiv_constant (regno) != 0)
4877 {
4878 find_reloads_address_part (reg_equiv_constant (regno), loc,
4879 base_reg_class (mode, as, MEM, SCRATCH),
4880 GET_MODE (ad), opnum, type, ind_levels);
4881 return 1;
4882 }
4883
4884 tem = reg_equiv_memory_loc (regno);
4885 if (tem != 0)
4886 {
4887 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4888 {
4889 tem = make_memloc (ad, regno);
4890 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4891 XEXP (tem, 0),
4892 MEM_ADDR_SPACE (tem)))
4893 {
4894 rtx orig = tem;
4895
4896 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4897 &XEXP (tem, 0), opnum,
4898 ADDR_TYPE (type), ind_levels, insn);
4899 if (!rtx_equal_p (tem, orig))
4900 push_reg_equiv_alt_mem (regno, tem);
4901 }
4902 /* We can avoid a reload if the register's equivalent memory
4903 expression is valid as an indirect memory address.
4904 But not all addresses are valid in a mem used as an indirect
4905 address: only reg or reg+constant. */
4906
4907 if (ind_levels > 0
4908 && strict_memory_address_addr_space_p (mode, tem, as)
4909 && (REG_P (XEXP (tem, 0))
4910 || (GET_CODE (XEXP (tem, 0)) == PLUS
4911 && REG_P (XEXP (XEXP (tem, 0), 0))
4912 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4913 {
4914 /* If TEM is not the same as what we'll be replacing the
4915 pseudo with after reload, put a USE in front of INSN
4916 in the final reload pass. */
4917 if (replace_reloads
4918 && num_not_at_initial_offset
4919 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4920 {
4921 *loc = tem;
4922 /* We mark the USE with QImode so that we
4923 recognize it as one that can be safely
4924 deleted at the end of reload. */
4925 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4926 insn), QImode);
4927
4928 /* This doesn't really count as replacing the address
4929 as a whole, since it is still a memory access. */
4930 }
4931 return 0;
4932 }
4933 ad = tem;
4934 }
4935 }
4936
4937 /* The only remaining case where we can avoid a reload is if this is a
4938 hard register that is valid as a base register and which is not the
4939 subject of a CLOBBER in this insn. */
4940
4941 else if (regno < FIRST_PSEUDO_REGISTER
4942 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4943 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4944 return 0;
4945
4946 /* If we do not have one of the cases above, we must do the reload. */
4947 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4948 base_reg_class (mode, as, MEM, SCRATCH),
4949 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4950 return 1;
4951 }
4952
4953 if (strict_memory_address_addr_space_p (mode, ad, as))
4954 {
4955 /* The address appears valid, so reloads are not needed.
4956 But the address may contain an eliminable register.
4957 This can happen because a machine with indirect addressing
4958 may consider a pseudo register by itself a valid address even when
4959 it has failed to get a hard reg.
4960 So do a tree-walk to find and eliminate all such regs. */
4961
4962 /* But first quickly dispose of a common case. */
4963 if (GET_CODE (ad) == PLUS
4964 && CONST_INT_P (XEXP (ad, 1))
4965 && REG_P (XEXP (ad, 0))
4966 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4967 return 0;
4968
4969 subst_reg_equivs_changed = 0;
4970 *loc = subst_reg_equivs (ad, insn);
4971
4972 if (! subst_reg_equivs_changed)
4973 return 0;
4974
4975 /* Check result for validity after substitution. */
4976 if (strict_memory_address_addr_space_p (mode, ad, as))
4977 return 0;
4978 }
4979
4980 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4981 do
4982 {
4983 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4984 {
4985 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4986 ind_levels, win);
4987 }
4988 break;
4989 win:
4990 *memrefloc = copy_rtx (*memrefloc);
4991 XEXP (*memrefloc, 0) = ad;
4992 move_replacements (&ad, &XEXP (*memrefloc, 0));
4993 return -1;
4994 }
4995 while (0);
4996 #endif
4997
4998 /* The address is not valid. We have to figure out why. First see if
4999 we have an outer AND and remove it if so. Then analyze what's inside. */
5000
5001 if (GET_CODE (ad) == AND)
5002 {
5003 removed_and = 1;
5004 loc = &XEXP (ad, 0);
5005 ad = *loc;
5006 }
5007
5008 /* One possibility for why the address is invalid is that it is itself
5009 a MEM. This can happen when the frame pointer is being eliminated, a
5010 pseudo is not allocated to a hard register, and the offset between the
5011 frame and stack pointers is not its initial value. In that case the
5012 pseudo will have been replaced by a MEM referring to the
5013 stack pointer. */
5014 if (MEM_P (ad))
5015 {
5016 /* First ensure that the address in this MEM is valid. Then, unless
5017 indirect addresses are valid, reload the MEM into a register. */
5018 tem = ad;
5019 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5020 opnum, ADDR_TYPE (type),
5021 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5022
5023 /* If tem was changed, then we must create a new memory reference to
5024 hold it and store it back into memrefloc. */
5025 if (tem != ad && memrefloc)
5026 {
5027 *memrefloc = copy_rtx (*memrefloc);
5028 copy_replacements (tem, XEXP (*memrefloc, 0));
5029 loc = &XEXP (*memrefloc, 0);
5030 if (removed_and)
5031 loc = &XEXP (*loc, 0);
5032 }
5033
5034 /* Check cases similar to those for indirect addresses above, except
5035 that we can allow pseudos and a MEM since they should have been
5036 taken care of above. */
5037
5038 if (ind_levels == 0
5039 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5040 || MEM_P (XEXP (tem, 0))
5041 || ! (REG_P (XEXP (tem, 0))
5042 || (GET_CODE (XEXP (tem, 0)) == PLUS
5043 && REG_P (XEXP (XEXP (tem, 0), 0))
5044 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5045 {
5046 /* Must use TEM here, not AD, since it is the one that will
5047 have any subexpressions reloaded, if needed. */
5048 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5049 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5050 VOIDmode, 0,
5051 0, opnum, type);
5052 return ! removed_and;
5053 }
5054 else
5055 return 0;
5056 }
5057
5058 /* If we have the address of a stack slot but it's not valid because the
5059 displacement is too large, compute the sum in a register.
5060 Handle all base registers here, not just fp/ap/sp, because on some
5061 targets (namely SH) we can also get too large displacements from
5062 big-endian corrections. */
5063 else if (GET_CODE (ad) == PLUS
5064 && REG_P (XEXP (ad, 0))
5065 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5066 && CONST_INT_P (XEXP (ad, 1))
5067 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5068 CONST_INT)
5069 /* Similarly, if we were to reload the base register and the
5070 mem+offset address is still invalid, then we want to reload
5071 the whole address, not just the base register. */
5072 || ! maybe_memory_address_addr_space_p
5073 (mode, ad, as, &(XEXP (ad, 0)))))
5074
5075 {
5076 /* Unshare the MEM rtx so we can safely alter it. */
5077 if (memrefloc)
5078 {
5079 *memrefloc = copy_rtx (*memrefloc);
5080 loc = &XEXP (*memrefloc, 0);
5081 if (removed_and)
5082 loc = &XEXP (*loc, 0);
5083 }
5084
5085 if (double_reg_address_ok[mode]
5086 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5087 PLUS, CONST_INT))
5088 {
5089 /* Unshare the sum as well. */
5090 *loc = ad = copy_rtx (ad);
5091
5092 /* Reload the displacement into an index reg.
5093 We assume the frame pointer or arg pointer is a base reg. */
5094 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5095 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5096 type, ind_levels);
5097 return 0;
5098 }
5099 else
5100 {
5101 /* If the sum of two regs is not necessarily valid,
5102 reload the sum into a base reg.
5103 That will at least work. */
5104 find_reloads_address_part (ad, loc,
5105 base_reg_class (mode, as, MEM, SCRATCH),
5106 GET_MODE (ad), opnum, type, ind_levels);
5107 }
5108 return ! removed_and;
5109 }
5110
5111 /* If we have an indexed stack slot, there are three possible reasons why
5112 it might be invalid: The index might need to be reloaded, the address
5113 might have been made by frame pointer elimination and hence have a
5114 constant out of range, or both reasons might apply.
5115
5116 We can easily check for an index needing reload, but even if that is the
5117 case, we might also have an invalid constant. To avoid making the
5118 conservative assumption and requiring two reloads, we see if this address
5119 is valid when not interpreted strictly. If it is, the only problem is
5120 that the index needs a reload and find_reloads_address_1 will take care
5121 of it.
5122
5123 Handle all base registers here, not just fp/ap/sp, because on some
5124 targets (namely SPARC) we can also get invalid addresses from preventive
5125 subreg big-endian corrections made by find_reloads_toplev. We
5126 can also get expressions involving LO_SUM (rather than PLUS) from
5127 find_reloads_subreg_address.
5128
5129 If we decide to do something, it must be that `double_reg_address_ok'
5130 is true. We generate a reload of the base register + constant and
5131 rework the sum so that the reload register will be added to the index.
5132 This is safe because we know the address isn't shared.
5133
5134 We check for the base register as both the first and second operand of
5135 the innermost PLUS and/or LO_SUM. */
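/* Illustrative sketch of the rework below (offsets invented): a frame
   access such as (plus (plus (reg fp) (reg idx)) (const_int 4000)) whose
   displacement is out of range is rewritten as
   (plus (plus (reg fp) (const_int 4000)) (reg idx)); the inner fp+4000
   sum is then reloaded into a base register and the index operand is
   handed to find_reloads_address_1, so the final address has the
   reg+reg form.  */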
5136
5137 for (op_index = 0; op_index < 2; ++op_index)
5138 {
5139 rtx operand, addend;
5140 enum rtx_code inner_code;
5141
5142 if (GET_CODE (ad) != PLUS)
5143 continue;
5144
5145 inner_code = GET_CODE (XEXP (ad, 0));
5146 if (!(GET_CODE (ad) == PLUS
5147 && CONST_INT_P (XEXP (ad, 1))
5148 && (inner_code == PLUS || inner_code == LO_SUM)))
5149 continue;
5150
5151 operand = XEXP (XEXP (ad, 0), op_index);
5152 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5153 continue;
5154
5155 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5156
5157 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5158 GET_CODE (addend))
5159 || operand == frame_pointer_rtx
5160 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5161 && operand == hard_frame_pointer_rtx)
5162 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5163 && operand == arg_pointer_rtx)
5164 || operand == stack_pointer_rtx)
5165 && ! maybe_memory_address_addr_space_p
5166 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5167 {
5168 rtx offset_reg;
5169 enum reg_class cls;
5170
5171 offset_reg = plus_constant (GET_MODE (ad), operand,
5172 INTVAL (XEXP (ad, 1)));
5173
5174 /* Form the adjusted address. */
5175 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5176 ad = gen_rtx_PLUS (GET_MODE (ad),
5177 op_index == 0 ? offset_reg : addend,
5178 op_index == 0 ? addend : offset_reg);
5179 else
5180 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5181 op_index == 0 ? offset_reg : addend,
5182 op_index == 0 ? addend : offset_reg);
5183 *loc = ad;
5184
5185 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5186 find_reloads_address_part (XEXP (ad, op_index),
5187 &XEXP (ad, op_index), cls,
5188 GET_MODE (ad), opnum, type, ind_levels);
5189 find_reloads_address_1 (mode, as,
5190 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5191 GET_CODE (XEXP (ad, op_index)),
5192 &XEXP (ad, 1 - op_index), opnum,
5193 type, 0, insn);
5194
5195 return 0;
5196 }
5197 }
5198
5199 /* See if address becomes valid when an eliminable register
5200 in a sum is replaced. */
5201
5202 tem = ad;
5203 if (GET_CODE (ad) == PLUS)
5204 tem = subst_indexed_address (ad);
5205 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5206 {
5207 /* Ok, we win that way. Replace any additional eliminable
5208 registers. */
5209
5210 subst_reg_equivs_changed = 0;
5211 tem = subst_reg_equivs (tem, insn);
5212
5213 /* Make sure that didn't make the address invalid again. */
5214
5215 if (! subst_reg_equivs_changed
5216 || strict_memory_address_addr_space_p (mode, tem, as))
5217 {
5218 *loc = tem;
5219 return 0;
5220 }
5221 }
5222
5223 /* If constants aren't valid addresses, reload the constant address
5224 into a register. */
5225 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5226 {
5227 machine_mode address_mode = GET_MODE (ad);
5228 if (address_mode == VOIDmode)
5229 address_mode = targetm.addr_space.address_mode (as);
5230
5231 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5232 Unshare it so we can safely alter it. */
5233 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5234 && CONSTANT_POOL_ADDRESS_P (ad))
5235 {
5236 *memrefloc = copy_rtx (*memrefloc);
5237 loc = &XEXP (*memrefloc, 0);
5238 if (removed_and)
5239 loc = &XEXP (*loc, 0);
5240 }
5241
5242 find_reloads_address_part (ad, loc,
5243 base_reg_class (mode, as, MEM, SCRATCH),
5244 address_mode, opnum, type, ind_levels);
5245 return ! removed_and;
5246 }
5247
5248 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5249 opnum, type, ind_levels, insn);
5250 }
5251 \f
5252 /* Find all pseudo regs appearing in AD
5253 that are eliminable in favor of equivalent values
5254 and do not have hard regs; replace them by their equivalents.
5255 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5256 front of it for pseudos that we have to replace with stack slots. */
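/* For example (invented numbers): within an already-valid address,
   (reg:SI 100) whose reg_equiv_constant is (const_int 16) becomes
   (const_int 16); a pseudo equivalent to an eliminated stack slot
   becomes the recomputed (mem ...) from make_memloc, with a QImode USE
   of the pseudo emitted before INSN so that delete_output_reload can
   still see the original register.  */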
5257
5258 static rtx
5259 subst_reg_equivs (rtx ad, rtx_insn *insn)
5260 {
5261 RTX_CODE code = GET_CODE (ad);
5262 int i;
5263 const char *fmt;
5264
5265 switch (code)
5266 {
5267 case HIGH:
5268 case CONST:
5269 CASE_CONST_ANY:
5270 case SYMBOL_REF:
5271 case LABEL_REF:
5272 case PC:
5273 case CC0:
5274 return ad;
5275
5276 case REG:
5277 {
5278 int regno = REGNO (ad);
5279
5280 if (reg_equiv_constant (regno) != 0)
5281 {
5282 subst_reg_equivs_changed = 1;
5283 return reg_equiv_constant (regno);
5284 }
5285 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5286 {
5287 rtx mem = make_memloc (ad, regno);
5288 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5289 {
5290 subst_reg_equivs_changed = 1;
5291 /* We mark the USE with QImode so that we recognize it
5292 as one that can be safely deleted at the end of
5293 reload. */
5294 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5295 QImode);
5296 return mem;
5297 }
5298 }
5299 }
5300 return ad;
5301
5302 case PLUS:
5303 /* Quickly dispose of a common case. */
5304 if (XEXP (ad, 0) == frame_pointer_rtx
5305 && CONST_INT_P (XEXP (ad, 1)))
5306 return ad;
5307 break;
5308
5309 default:
5310 break;
5311 }
5312
5313 fmt = GET_RTX_FORMAT (code);
5314 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5315 if (fmt[i] == 'e')
5316 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5317 return ad;
5318 }
5319 \f
5320 /* Compute the sum of X and Y, making canonicalizations assumed in an
5321 address, namely: sum constant integers, surround the sum of two
5322 constants with a CONST, put the constant as the second operand, and
5323 group the constant on the outermost sum.
5324
5325 This routine assumes both inputs are already in canonical form. */
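/* A few invented examples of the canonicalization:
   form_sum (SImode, (plus (reg 100) (const_int 4)), (const_int 8))
     => (plus (reg 100) (const_int 12))
   form_sum (SImode, (symbol_ref "x"), (const_int 4))
     => (const (plus (symbol_ref "x") (const_int 4)))
   form_sum (SImode, (reg 100), (reg 101))
     => (plus (reg 100) (reg 101))  */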
5326
5327 rtx
5328 form_sum (machine_mode mode, rtx x, rtx y)
5329 {
5330 rtx tem;
5331
5332 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5333 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5334
5335 if (CONST_INT_P (x))
5336 return plus_constant (mode, y, INTVAL (x));
5337 else if (CONST_INT_P (y))
5338 return plus_constant (mode, x, INTVAL (y));
5339 else if (CONSTANT_P (x))
5340 tem = x, x = y, y = tem;
5341
5342 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5343 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5344
5345 /* Note that if the operands of Y are specified in the opposite
5346 order in the recursive calls below, infinite recursion will occur. */
5347 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5348 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5349
5350 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5351 constant will have been placed second. */
5352 if (CONSTANT_P (x) && CONSTANT_P (y))
5353 {
5354 if (GET_CODE (x) == CONST)
5355 x = XEXP (x, 0);
5356 if (GET_CODE (y) == CONST)
5357 y = XEXP (y, 0);
5358
5359 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5360 }
5361
5362 return gen_rtx_PLUS (mode, x, y);
5363 }
5364 \f
5365 /* If ADDR is a sum containing a pseudo register that should be
5366 replaced with a constant (from reg_equiv_constant),
5367 return the result of doing so, and also apply the associative
5368 law so that the result is more likely to be a valid address.
5369 (But it is not guaranteed to be one.)
5370
5371 Note that at most one register is replaced, even if more are
5372 replaceable. Also, we try to put the result into a canonical form
5373 so it is more likely to be a valid address.
5374
5375 In all other cases, return ADDR. */
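/* Illustrative example (invented numbers): for
   (plus (reg 100) (const_int 12)) where pseudo 100 got no hard register
   and reg_equiv_constant (100) is
   (const (plus (symbol_ref "s") (const_int 4))), the pseudo is replaced
   and the pieces re-summed with form_sum, giving
   (const (plus (symbol_ref "s") (const_int 16))).  */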
5376
5377 static rtx
5378 subst_indexed_address (rtx addr)
5379 {
5380 rtx op0 = 0, op1 = 0, op2 = 0;
5381 rtx tem;
5382 int regno;
5383
5384 if (GET_CODE (addr) == PLUS)
5385 {
5386 /* Try to find a register to replace. */
5387 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5388 if (REG_P (op0)
5389 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5390 && reg_renumber[regno] < 0
5391 && reg_equiv_constant (regno) != 0)
5392 op0 = reg_equiv_constant (regno);
5393 else if (REG_P (op1)
5394 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5395 && reg_renumber[regno] < 0
5396 && reg_equiv_constant (regno) != 0)
5397 op1 = reg_equiv_constant (regno);
5398 else if (GET_CODE (op0) == PLUS
5399 && (tem = subst_indexed_address (op0)) != op0)
5400 op0 = tem;
5401 else if (GET_CODE (op1) == PLUS
5402 && (tem = subst_indexed_address (op1)) != op1)
5403 op1 = tem;
5404 else
5405 return addr;
5406
5407 /* Pick out up to three things to add. */
5408 if (GET_CODE (op1) == PLUS)
5409 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5410 else if (GET_CODE (op0) == PLUS)
5411 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5412
5413 /* Compute the sum. */
5414 if (op2 != 0)
5415 op1 = form_sum (GET_MODE (addr), op1, op2);
5416 if (op1 != 0)
5417 op0 = form_sum (GET_MODE (addr), op0, op1);
5418
5419 return op0;
5420 }
5421 return addr;
5422 }
5423 \f
5424 /* Update the REG_INC notes for an insn. It updates all REG_INC
5425 notes for the instruction which refer to REGNO so that they refer
5426 to the reload number.
5427
5428 INSN is the insn for which any REG_INC notes need updating.
5429
5430 REGNO is the register number which has been reloaded.
5431
5432 RELOADNUM is the reload number. */
5433
5434 static void
5435 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5436 int reloadnum ATTRIBUTE_UNUSED)
5437 {
5438 if (!AUTO_INC_DEC)
5439 return;
5440
5441 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5442 if (REG_NOTE_KIND (link) == REG_INC
5443 && (int) REGNO (XEXP (link, 0)) == regno)
5444 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5445 }
5446 \f
5447 /* Record the pseudo registers we must reload into hard registers in a
5448 subexpression of a would-be memory address, X referring to a value
5449 in mode MODE. (This function is not called if the address we find
5450 is strictly valid.)
5451
5452 CONTEXT = 1 means we are considering regs as index regs,
5453 = 0 means we are considering them as base regs.
5454 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5455 or an autoinc code.
5456 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5457 is the code of the index part of the address. Otherwise, pass SCRATCH
5458 for this argument.
5459 OPNUM and TYPE specify the purpose of any reloads made.
5460
5461 IND_LEVELS says how many levels of indirect addressing are
5462 supported at this point in the address.
5463
5464 INSN, if nonzero, is the insn in which we do the reload. It is used
5465 to determine if we may generate output reloads.
5466
5467 We return nonzero if X, as a whole, is reloaded or replaced. */
5468
5469 /* Note that we take shortcuts assuming that no multi-reg machine mode
5470 occurs as part of an address.
5471 Also, this is not fully machine-customizable; it works for machines
5472 such as VAXen and 68000's and 32000's, but other possible machines
5473 could have addressing modes that this does not handle right.
5474 If you add push_reload calls here, you need to make sure gen_reload
5475 handles those cases gracefully. */
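/* Illustrative sketch (invented registers): for an address such as
   (plus (reg 100) (mult (reg 101) (const_int 4))), the MULT operand of
   the PLUS is processed with CONTEXT = 1 (an index) and the bare
   register with CONTEXT = 0 (a base), so pseudos that got no hard
   register are reloaded into INDEX_REG_CLASS or into the appropriate
   base register class respectively.  */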
5476
5477 static int
5478 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5479 rtx x, int context,
5480 enum rtx_code outer_code, enum rtx_code index_code,
5481 rtx *loc, int opnum, enum reload_type type,
5482 int ind_levels, rtx_insn *insn)
5483 {
5484 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5485 ((CONTEXT) == 0 \
5486 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5487 : REGNO_OK_FOR_INDEX_P (REGNO))
5488
5489 enum reg_class context_reg_class;
5490 RTX_CODE code = GET_CODE (x);
5491 bool reloaded_inner_of_autoinc = false;
5492
5493 if (context == 1)
5494 context_reg_class = INDEX_REG_CLASS;
5495 else
5496 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5497
5498 switch (code)
5499 {
5500 case PLUS:
5501 {
5502 rtx orig_op0 = XEXP (x, 0);
5503 rtx orig_op1 = XEXP (x, 1);
5504 RTX_CODE code0 = GET_CODE (orig_op0);
5505 RTX_CODE code1 = GET_CODE (orig_op1);
5506 rtx op0 = orig_op0;
5507 rtx op1 = orig_op1;
5508
5509 if (GET_CODE (op0) == SUBREG)
5510 {
5511 op0 = SUBREG_REG (op0);
5512 code0 = GET_CODE (op0);
5513 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5514 op0 = gen_rtx_REG (word_mode,
5515 (REGNO (op0) +
5516 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5517 GET_MODE (SUBREG_REG (orig_op0)),
5518 SUBREG_BYTE (orig_op0),
5519 GET_MODE (orig_op0))));
5520 }
5521
5522 if (GET_CODE (op1) == SUBREG)
5523 {
5524 op1 = SUBREG_REG (op1);
5525 code1 = GET_CODE (op1);
5526 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5527 /* ??? Why is this given op1's mode while above for
5528 ??? op0 SUBREGs we use word_mode? */
5529 op1 = gen_rtx_REG (GET_MODE (op1),
5530 (REGNO (op1) +
5531 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5532 GET_MODE (SUBREG_REG (orig_op1)),
5533 SUBREG_BYTE (orig_op1),
5534 GET_MODE (orig_op1))));
5535 }
5536 /* Plus in the index register may be created only as a result of
5537 register rematerialization for expression like &localvar*4. Reload it.
5538 It may be possible to combine the displacement on the outer level,
5539 but it is probably not worthwhile to do so. */
5540 if (context == 1)
5541 {
5542 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5543 opnum, ADDR_TYPE (type), ind_levels, insn);
5544 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5545 context_reg_class,
5546 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5547 return 1;
5548 }
5549
5550 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5551 || code0 == ZERO_EXTEND || code1 == MEM)
5552 {
5553 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5554 &XEXP (x, 0), opnum, type, ind_levels,
5555 insn);
5556 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5557 &XEXP (x, 1), opnum, type, ind_levels,
5558 insn);
5559 }
5560
5561 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5562 || code1 == ZERO_EXTEND || code0 == MEM)
5563 {
5564 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5565 &XEXP (x, 0), opnum, type, ind_levels,
5566 insn);
5567 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5568 &XEXP (x, 1), opnum, type, ind_levels,
5569 insn);
5570 }
5571
5572 else if (code0 == CONST_INT || code0 == CONST
5573 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5574 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5575 &XEXP (x, 1), opnum, type, ind_levels,
5576 insn);
5577
5578 else if (code1 == CONST_INT || code1 == CONST
5579 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5580 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5581 &XEXP (x, 0), opnum, type, ind_levels,
5582 insn);
5583
5584 else if (code0 == REG && code1 == REG)
5585 {
5586 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5587 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5588 return 0;
5589 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5590 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5591 return 0;
5592 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5593 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5594 &XEXP (x, 1), opnum, type, ind_levels,
5595 insn);
5596 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5597 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5598 &XEXP (x, 0), opnum, type, ind_levels,
5599 insn);
5600 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5601 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5602 &XEXP (x, 0), opnum, type, ind_levels,
5603 insn);
5604 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5605 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5606 &XEXP (x, 1), opnum, type, ind_levels,
5607 insn);
5608 else
5609 {
5610 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5611 &XEXP (x, 0), opnum, type, ind_levels,
5612 insn);
5613 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5614 &XEXP (x, 1), opnum, type, ind_levels,
5615 insn);
5616 }
5617 }
5618
5619 else if (code0 == REG)
5620 {
5621 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5622 &XEXP (x, 0), opnum, type, ind_levels,
5623 insn);
5624 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5625 &XEXP (x, 1), opnum, type, ind_levels,
5626 insn);
5627 }
5628
5629 else if (code1 == REG)
5630 {
5631 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5632 &XEXP (x, 1), opnum, type, ind_levels,
5633 insn);
5634 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5635 &XEXP (x, 0), opnum, type, ind_levels,
5636 insn);
5637 }
5638 }
5639
5640 return 0;
5641
5642 case POST_MODIFY:
5643 case PRE_MODIFY:
5644 {
5645 rtx op0 = XEXP (x, 0);
5646 rtx op1 = XEXP (x, 1);
5647 enum rtx_code index_code;
5648 int regno;
5649 int reloadnum;
5650
5651 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5652 return 0;
5653
5654 /* Currently, we only support {PRE,POST}_MODIFY constructs
5655 where a base register is {inc,dec}remented by the contents
5656 of another register or by a constant value. Thus, these
5657 operands must match. */
5658 gcc_assert (op0 == XEXP (op1, 0));
5659
5660 /* Require index register (or constant). Let's just handle the
5661 register case in the meantime... If the target allows
5662 auto-modify by a constant then we could try replacing a pseudo
5663 register with its equivalent constant where applicable.
5664
5665 We also handle the case where the register was eliminated
5666 resulting in a PLUS subexpression.
5667
5668 If we later decide to reload the whole PRE_MODIFY or
5669 POST_MODIFY, inc_for_reload might clobber the reload register
5670 before reading the index. The index register might therefore
5671 need to live longer than a TYPE reload normally would, so be
5672 conservative and class it as RELOAD_OTHER. */
5673 if ((REG_P (XEXP (op1, 1))
5674 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5675 || GET_CODE (XEXP (op1, 1)) == PLUS)
5676 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5677 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5678 ind_levels, insn);
5679
5680 gcc_assert (REG_P (XEXP (op1, 0)));
5681
5682 regno = REGNO (XEXP (op1, 0));
5683 index_code = GET_CODE (XEXP (op1, 1));
5684
5685 /* A register that is incremented cannot be constant! */
5686 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5687 || reg_equiv_constant (regno) == 0);
5688
5689 /* Handle a register that is equivalent to a memory location
5690 which cannot be addressed directly. */
5691 if (reg_equiv_memory_loc (regno) != 0
5692 && (reg_equiv_address (regno) != 0
5693 || num_not_at_initial_offset))
5694 {
5695 rtx tem = make_memloc (XEXP (x, 0), regno);
5696
5697 if (reg_equiv_address (regno)
5698 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5699 {
5700 rtx orig = tem;
5701
5702 /* First reload the memory location's address.
5703 We can't use ADDR_TYPE (type) here, because we need to
5704 write back the value after reading it, hence we actually
5705 need two registers. */
5706 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5707 &XEXP (tem, 0), opnum,
5708 RELOAD_OTHER,
5709 ind_levels, insn);
5710
5711 if (!rtx_equal_p (tem, orig))
5712 push_reg_equiv_alt_mem (regno, tem);
5713
5714 /* Then reload the memory location into a base
5715 register. */
5716 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5717 &XEXP (op1, 0),
5718 base_reg_class (mode, as,
5719 code, index_code),
5720 GET_MODE (x), GET_MODE (x), 0,
5721 0, opnum, RELOAD_OTHER);
5722
5723 update_auto_inc_notes (this_insn, regno, reloadnum);
5724 return 0;
5725 }
5726 }
5727
5728 if (reg_renumber[regno] >= 0)
5729 regno = reg_renumber[regno];
5730
5731 /* We require a base register here... */
5732 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5733 {
5734 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5735 &XEXP (op1, 0), &XEXP (x, 0),
5736 base_reg_class (mode, as,
5737 code, index_code),
5738 GET_MODE (x), GET_MODE (x), 0, 0,
5739 opnum, RELOAD_OTHER);
5740
5741 update_auto_inc_notes (this_insn, regno, reloadnum);
5742 return 0;
5743 }
5744 }
5745 return 0;
5746
5747 case POST_INC:
5748 case POST_DEC:
5749 case PRE_INC:
5750 case PRE_DEC:
5751 if (REG_P (XEXP (x, 0)))
5752 {
5753 int regno = REGNO (XEXP (x, 0));
5754 int value = 0;
5755 rtx x_orig = x;
5756
5757 /* A register that is incremented cannot be constant! */
5758 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5759 || reg_equiv_constant (regno) == 0);
5760
5761 /* Handle a register that is equivalent to a memory location
5762 which cannot be addressed directly. */
5763 if (reg_equiv_memory_loc (regno) != 0
5764 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5765 {
5766 rtx tem = make_memloc (XEXP (x, 0), regno);
5767 if (reg_equiv_address (regno)
5768 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5769 {
5770 rtx orig = tem;
5771
5772 /* First reload the memory location's address.
5773 We can't use ADDR_TYPE (type) here, because we need to
5774 write back the value after reading it, hence we actually
5775 need two registers. */
5776 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5777 &XEXP (tem, 0), opnum, type,
5778 ind_levels, insn);
5779 reloaded_inner_of_autoinc = true;
5780 if (!rtx_equal_p (tem, orig))
5781 push_reg_equiv_alt_mem (regno, tem);
5782 /* Put this inside a new increment-expression. */
5783 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5784 /* Proceed to reload that, as if it contained a register. */
5785 }
5786 }
5787
5788 /* If we have a hard register that is ok in this incdec context,
5789 don't make a reload. If the register isn't nice enough for
5790 autoincdec, we can reload it. But if an autoincrement of a
5791 register that we verified here as acceptable still isn't
5792 "valid" in the surrounding context, it must be that no
5793 autoincrement is "valid". If that is true and something made
5794 an autoincrement anyway, this must be a special context where
5795 one is allowed. (For example, a "push" instruction.)
5796 We can't improve this address, so leave it alone. */
5797
5798 /* Otherwise, reload the autoincrement into a suitable hard reg
5799 and record how much to increment by. */
5800
5801 if (reg_renumber[regno] >= 0)
5802 regno = reg_renumber[regno];
5803 if (regno >= FIRST_PSEUDO_REGISTER
5804 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5805 index_code))
5806 {
5807 int reloadnum;
5808
5809 /* If we can output the register afterwards, do so; this
5810 saves the extra update.
5811 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5812 CALL_INSN - and it does not set CC0.
5813 But don't do this if we cannot directly address the
5814 memory location, since this will make it harder to
5815 reuse address reloads, and increases register pressure.
5816 Also don't do this if we can probably update x directly. */
5817 rtx equiv = (MEM_P (XEXP (x, 0))
5818 ? XEXP (x, 0)
5819 : reg_equiv_mem (regno));
5820 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5821 if (insn && NONJUMP_INSN_P (insn)
5822 #if HAVE_cc0
5823 && ! sets_cc0_p (PATTERN (insn))
5824 #endif
5825 && (regno < FIRST_PSEUDO_REGISTER
5826 || (equiv
5827 && memory_operand (equiv, GET_MODE (equiv))
5828 && ! (icode != CODE_FOR_nothing
5829 && insn_operand_matches (icode, 0, equiv)
5830 && insn_operand_matches (icode, 1, equiv))))
5831 /* Using RELOAD_OTHER means we emit this and the reload we
5832 made earlier in the wrong order. */
5833 && !reloaded_inner_of_autoinc)
5834 {
5835 /* We use the original pseudo for loc, so that
5836 emit_reload_insns() knows which pseudo this
5837 reload refers to and updates the pseudo rtx, not
5838 its equivalent memory location, as well as the
5839 corresponding entry in reg_last_reload_reg. */
5840 loc = &XEXP (x_orig, 0);
5841 x = XEXP (x, 0);
5842 reloadnum
5843 = push_reload (x, x, loc, loc,
5844 context_reg_class,
5845 GET_MODE (x), GET_MODE (x), 0, 0,
5846 opnum, RELOAD_OTHER);
5847 }
5848 else
5849 {
5850 reloadnum
5851 = push_reload (x, x, loc, (rtx*) 0,
5852 context_reg_class,
5853 GET_MODE (x), GET_MODE (x), 0, 0,
5854 opnum, type);
5855 rld[reloadnum].inc
5856 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5857
5858 value = 1;
5859 }
5860
5861 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5862 reloadnum);
5863 }
5864 return value;
5865 }
5866 return 0;
5867
5868 case TRUNCATE:
5869 case SIGN_EXTEND:
5870 case ZERO_EXTEND:
5871 /* Look for parts to reload in the inner expression and reload them
5872 too, in addition to this operation. Reloading all inner parts in
5873 addition to this one shouldn't be necessary, but at this point,
5874 we don't know if we can possibly omit any part that *can* be
5875 reloaded. Targets that are better off reloading just either part
5876 (or perhaps even a different part of an outer expression), should
5877 define LEGITIMIZE_RELOAD_ADDRESS. */
5878 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5879 context, code, SCRATCH, &XEXP (x, 0), opnum,
5880 type, ind_levels, insn);
5881 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5882 context_reg_class,
5883 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5884 return 1;
5885
5886 case MEM:
5887 /* This is probably the result of a substitution, by eliminate_regs, of
5888 an equivalent address for a pseudo that was not allocated to a hard
5889 register. Verify that the specified address is valid and reload it
5890 into a register.
5891
5892 Since we know we are going to reload this item, don't decrement for
5893 the indirection level.
5894
5895 Note that this is actually conservative: it would be slightly more
5896 efficient to use the value of SPILL_INDIRECT_LEVELS from
5897 reload1.c here. */
5898
5899 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5900 opnum, ADDR_TYPE (type), ind_levels, insn);
5901 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5902 context_reg_class,
5903 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5904 return 1;
5905
5906 case REG:
5907 {
5908 int regno = REGNO (x);
5909
5910 if (reg_equiv_constant (regno) != 0)
5911 {
5912 find_reloads_address_part (reg_equiv_constant (regno), loc,
5913 context_reg_class,
5914 GET_MODE (x), opnum, type, ind_levels);
5915 return 1;
5916 }
5917
5918 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5919 that feeds this insn. */
5920 if (reg_equiv_mem (regno) != 0)
5921 {
5922 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5923 context_reg_class,
5924 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5925 return 1;
5926 }
5927 #endif
5928
5929 if (reg_equiv_memory_loc (regno)
5930 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5931 {
5932 rtx tem = make_memloc (x, regno);
5933 if (reg_equiv_address (regno) != 0
5934 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5935 {
5936 x = tem;
5937 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5938 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5939 ind_levels, insn);
5940 if (!rtx_equal_p (x, tem))
5941 push_reg_equiv_alt_mem (regno, x);
5942 }
5943 }
5944
5945 if (reg_renumber[regno] >= 0)
5946 regno = reg_renumber[regno];
5947
5948 if (regno >= FIRST_PSEUDO_REGISTER
5949 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5950 index_code))
5951 {
5952 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5953 context_reg_class,
5954 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5955 return 1;
5956 }
5957
5958 /* If a register appearing in an address is the subject of a CLOBBER
5959 in this insn, reload it into some other register to be safe.
5960 The CLOBBER is supposed to make the register unavailable
5961 from before this insn to after it. */
5962 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5963 {
5964 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5965 context_reg_class,
5966 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5967 return 1;
5968 }
5969 }
5970 return 0;
5971
5972 case SUBREG:
5973 if (REG_P (SUBREG_REG (x)))
5974 {
5975 /* If this is a SUBREG of a hard register and the resulting register
5976 is of the wrong class, reload the whole SUBREG. This avoids
5977 needless copies if SUBREG_REG is multi-word. */
5978 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5979 {
5980 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5981
5982 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5983 index_code))
5984 {
5985 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5986 context_reg_class,
5987 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5988 return 1;
5989 }
5990 }
5991 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5992 is larger than the class size, then reload the whole SUBREG. */
5993 else
5994 {
5995 enum reg_class rclass = context_reg_class;
5996 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5997 > reg_class_size[(int) rclass])
5998 {
5999 /* If the inner register will be replaced by a memory
6000 reference, we can do this only if we can replace the
6001 whole subreg by a (narrower) memory reference. If
6002 this is not possible, fall through and reload just
6003 the inner register (including address reloads). */
6004 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6005 {
6006 rtx tem = find_reloads_subreg_address (x, opnum,
6007 ADDR_TYPE (type),
6008 ind_levels, insn,
6009 NULL);
6010 if (tem)
6011 {
6012 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6013 GET_MODE (tem), VOIDmode, 0, 0,
6014 opnum, type);
6015 return 1;
6016 }
6017 }
6018 else
6019 {
6020 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6021 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6022 return 1;
6023 }
6024 }
6025 }
6026 }
6027 break;
6028
6029 default:
6030 break;
6031 }
6032
6033 {
6034 const char *fmt = GET_RTX_FORMAT (code);
6035 int i;
6036
6037 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6038 {
6039 if (fmt[i] == 'e')
6040 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6041 we get here. */
6042 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6043 code, SCRATCH, &XEXP (x, i),
6044 opnum, type, ind_levels, insn);
6045 }
6046 }
6047
6048 #undef REG_OK_FOR_CONTEXT
6049 return 0;
6050 }
6051 \f
6052 /* X, which is found at *LOC, is a part of an address that needs to be
6053 reloaded into a register of class RCLASS. If X is a constant, or if
6054 X is a PLUS that contains a constant, check that the constant is a
6055 legitimate operand and that we are supposed to be able to load
6056 it into the register.
6057
6058 If not, force the constant into memory and reload the MEM instead.
6059
6060 MODE is the mode to use, in case X is an integer constant.
6061
6062 OPNUM and TYPE describe the purpose of any reloads made.
6063
6064 IND_LEVELS says how many levels of indirect addressing this machine
6065 supports. */
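/* Sketch of the two non-trivial paths (example constants invented): if X
   is a constant the target cannot load directly, e.g. a large
   CONST_DOUBLE, it is spilled to the constant pool with force_const_mem
   and the resulting MEM is reloaded instead; if X is
   (plus (reg fp) (symbol_ref "x")) and the SYMBOL_REF is not a
   legitimate constant for RCLASS, only that summand is forced into
   memory before the whole sum is reloaded.  */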
6066
6067 static void
6068 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6069 machine_mode mode, int opnum,
6070 enum reload_type type, int ind_levels)
6071 {
6072 if (CONSTANT_P (x)
6073 && (!targetm.legitimate_constant_p (mode, x)
6074 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6075 {
6076 x = force_const_mem (mode, x);
6077 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6078 opnum, type, ind_levels, 0);
6079 }
6080
6081 else if (GET_CODE (x) == PLUS
6082 && CONSTANT_P (XEXP (x, 1))
6083 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6084 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6085 == NO_REGS))
6086 {
6087 rtx tem;
6088
6089 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6090 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6091 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6092 opnum, type, ind_levels, 0);
6093 }
6094
6095 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6096 mode, VOIDmode, 0, 0, opnum, type);
6097 }
6098 \f
6099 /* X, a subreg of a pseudo, is a part of an address that needs to be
6100 reloaded, and the pseudo is equivalent to a memory location.
6101
6102 Attempt to replace the whole subreg by a (possibly narrower or wider)
6103 memory reference. If this is possible, return this new memory
6104 reference, and push all required address reloads. Otherwise,
6105 return NULL.
6106
6107 OPNUM and TYPE identify the purpose of the reload.
6108
6109 IND_LEVELS says how many levels of indirect addressing are
6110 supported at this point in the address.
6111
6112 INSN, if nonzero, is the insn in which we do the reload. It is used
6113 to determine where to put USEs for pseudos that we have to replace with
6114 stack slots. */
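/* Invented example: if pseudo 100 lives in the stack slot
   (mem:SI (plus (reg fp) (const_int -8))), then
   (subreg:HI (reg:SI 100) 2) can be replaced by
   (mem:HI (plus (reg fp) (const_int -6))), i.e. the slot re-accessed in
   the narrower mode at the subreg's byte offset; any reloads needed for
   the new address are pushed here.  */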
6115
6116 static rtx
6117 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6118 int ind_levels, rtx_insn *insn,
6119 int *address_reloaded)
6120 {
6121 machine_mode outer_mode = GET_MODE (x);
6122 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6123 int regno = REGNO (SUBREG_REG (x));
6124 int reloaded = 0;
6125 rtx tem, orig;
6126 poly_int64 offset;
6127
6128 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6129
6130 /* We cannot replace the subreg with a modified memory reference if:
6131
6132 - we have a paradoxical subreg that implicitly acts as a zero or
6133 sign extension operation due to LOAD_EXTEND_OP;
6134
6135 - we have a subreg that is implicitly supposed to act on the full
6136 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6137
6138 - the address of the equivalent memory location is mode-dependent; or
6139
6140 - we have a paradoxical subreg and the resulting memory is not
6141 sufficiently aligned to allow access in the wider mode.
6142
6143 In addition, we choose not to perform the replacement for *any*
6144 paradoxical subreg, even if it were possible in principle. This
6145 is to avoid generating wider memory references than necessary.
6146
6147 This corresponds to how previous versions of reload used to handle
6148 paradoxical subregs where no address reload was required. */
6149
6150 if (paradoxical_subreg_p (x))
6151 return NULL;
6152
6153 if (WORD_REGISTER_OPERATIONS
6154 && partial_subreg_p (outer_mode, inner_mode)
6155 && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6156 GET_MODE_SIZE (inner_mode) - 1,
6157 UNITS_PER_WORD))
6158 return NULL;
6159
6160 /* Since we don't attempt to handle paradoxical subregs, we can just
6161 call into simplify_subreg, which will handle all remaining checks
6162 for us. */
6163 orig = make_memloc (SUBREG_REG (x), regno);
6164 offset = SUBREG_BYTE (x);
6165 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6166 if (!tem || !MEM_P (tem))
6167 return NULL;
6168
6169 /* Now push all required address reloads, if any. */
6170 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6171 XEXP (tem, 0), &XEXP (tem, 0),
6172 opnum, type, ind_levels, insn);
6173 /* ??? Do we need to handle nonzero offsets somehow? */
6174 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6175 push_reg_equiv_alt_mem (regno, tem);
6176
6177 /* For some processors an address may be valid in the original mode but
6178 not in a smaller mode. For example, ARM accepts a scaled index register
6179 in SImode but not in HImode. Note that this is only a problem if the
6180 address in reg_equiv_mem is already invalid in the new mode; other
6181 cases would be fixed by find_reloads_address as usual.
6182
6183 ??? We attempt to handle such cases here by doing an additional reload
6184 of the full address after the usual processing by find_reloads_address.
6185 Note that this may not work in the general case, but it seems to cover
6186 the cases where this situation currently occurs. A more general fix
6187 might be to reload the *value* instead of the address, but this would
6188 not be expected by the callers of this routine as-is.
6189
6190 If find_reloads_address has already completely replaced the address, there
6191 is nothing further to do. */
6192 if (reloaded == 0
6193 && reg_equiv_mem (regno) != 0
6194 && !strict_memory_address_addr_space_p
6195 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6196 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6197 {
6198 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6199 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6200 MEM, SCRATCH),
6201 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6202 reloaded = 1;
6203 }
6204
6205 /* If this is not a toplevel operand, find_reloads doesn't see this
6206 substitution. We have to emit a USE of the pseudo so that
6207 delete_output_reload can see it. */
6208 if (replace_reloads && recog_data.operand[opnum] != x)
6209 /* We mark the USE with QImode so that we recognize it as one that
6210 can be safely deleted at the end of reload. */
6211 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6212 QImode);
6213
6214 if (address_reloaded)
6215 *address_reloaded = reloaded;
6216
6217 return tem;
6218 }
6219 \f
6220 /* Substitute into the current INSN the registers into which we have reloaded
6221 the things that need reloading. The array `replacements'
6222 contains the locations of all pointers that must be changed
6223 and says what to replace them with. */
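/* For example (invented numbers): if replacement R recorded r->where =
   &XEXP (SET_SRC (PATTERN (insn)), 0) for a reload whose reg_rtx ended
   up as (reg:SI 3), that location is overwritten with (reg:SI 3), first
   wrapped by reload_adjust_reg_for_mode if r->mode differs from the
   reload register's mode.  */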
6226
6227 void
6228 subst_reloads (rtx_insn *insn)
6229 {
6230 int i;
6231
6232 for (i = 0; i < n_replacements; i++)
6233 {
6234 struct replacement *r = &replacements[i];
6235 rtx reloadreg = rld[r->what].reg_rtx;
6236 if (reloadreg)
6237 {
6238 #ifdef DEBUG_RELOAD
6239 /* This checking takes a very long time on some platforms
6240 causing the gcc.c-torture/compile/limits-fnargs.c test
6241 to time out during testing. See PR 31850.
6242
6243 Internal consistency test. Check that we don't modify
6244 anything in the equivalence arrays. Whenever something from
6245 those arrays needs to be reloaded, it must be unshared before
6246 being substituted into; the equivalence must not be modified.
6247 Otherwise, if the equivalence is used after that, it will
6248 have been modified, and the thing substituted (probably a
6249 register) is likely overwritten and not a usable equivalence. */
6250 int check_regno;
6251
6252 for (check_regno = 0; check_regno < max_regno; check_regno++)
6253 {
6254 #define CHECK_MODF(ARRAY) \
6255 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6256 || !loc_mentioned_in_p (r->where, \
6257 (*reg_equivs)[check_regno].ARRAY))
6258
6259 CHECK_MODF (constant);
6260 CHECK_MODF (memory_loc);
6261 CHECK_MODF (address);
6262 CHECK_MODF (mem);
6263 #undef CHECK_MODF
6264 }
6265 #endif /* DEBUG_RELOAD */
6266
6267 /* If we're replacing a LABEL_REF with a register, there must
6268 already be an indication (to e.g. flow) of which label this
6269 register refers to. */
6270 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6271 || !JUMP_P (insn)
6272 || find_reg_note (insn,
6273 REG_LABEL_OPERAND,
6274 XEXP (*r->where, 0))
6275 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6276
6277 /* Encapsulate RELOADREG so its machine mode matches what
6278 used to be there. Note that gen_lowpart_common will
6279 do the wrong thing if RELOADREG is multi-word. RELOADREG
6280 will always be a REG here. */
6281 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6282 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6283
6284 *r->where = reloadreg;
6285 }
6286 /* If reload got no reg and isn't optional, something's wrong. */
6287 else
6288 gcc_assert (rld[r->what].optional);
6289 }
6290 }
6291 \f
6292 /* Make a copy of any replacements being done into X and move those
6293 copies to locations in Y, a copy of X. */
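/* For instance, if a replacement was recorded at &XEXP (x, 1), then
   copy_replacements (x, y) records a parallel replacement at
   &XEXP (y, 1) referring to the same reload and mode.  */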
6294
6295 void
6296 copy_replacements (rtx x, rtx y)
6297 {
6298 copy_replacements_1 (&x, &y, n_replacements);
6299 }
6300
6301 static void
6302 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6303 {
6304 int i, j;
6305 rtx x, y;
6306 struct replacement *r;
6307 enum rtx_code code;
6308 const char *fmt;
6309
6310 for (j = 0; j < orig_replacements; j++)
6311 if (replacements[j].where == px)
6312 {
6313 r = &replacements[n_replacements++];
6314 r->where = py;
6315 r->what = replacements[j].what;
6316 r->mode = replacements[j].mode;
6317 }
6318
6319 x = *px;
6320 y = *py;
6321 code = GET_CODE (x);
6322 fmt = GET_RTX_FORMAT (code);
6323
6324 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6325 {
6326 if (fmt[i] == 'e')
6327 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6328 else if (fmt[i] == 'E')
6329 for (j = XVECLEN (x, i); --j >= 0; )
6330 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6331 orig_replacements);
6332 }
6333 }
6334
6335 /* Change any replacements being done to *X to be done to *Y. */
6336
6337 void
6338 move_replacements (rtx *x, rtx *y)
6339 {
6340 int i;
6341
6342 for (i = 0; i < n_replacements; i++)
6343 if (replacements[i].where == x)
6344 replacements[i].where = y;
6345 }
6346 \f
6347 /* If LOC was scheduled to be replaced by something, return the replacement.
6348 Otherwise, return *LOC. */
6349
6350 rtx
6351 find_replacement (rtx *loc)
6352 {
6353 struct replacement *r;
6354
6355 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6356 {
6357 rtx reloadreg = rld[r->what].reg_rtx;
6358
6359 if (reloadreg && r->where == loc)
6360 {
6361 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6362 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6363
6364 return reloadreg;
6365 }
6366 else if (reloadreg && GET_CODE (*loc) == SUBREG
6367 && r->where == &SUBREG_REG (*loc))
6368 {
6369 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6370 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6371
6372 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6373 GET_MODE (SUBREG_REG (*loc)),
6374 SUBREG_BYTE (*loc));
6375 }
6376 }
6377
6378 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6379 what's inside and make a new rtl if so. */
6380 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6381 || GET_CODE (*loc) == MULT)
6382 {
6383 rtx x = find_replacement (&XEXP (*loc, 0));
6384 rtx y = find_replacement (&XEXP (*loc, 1));
6385
6386 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6387 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6388 }
6389
6390 return *loc;
6391 }
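
/* For illustration: when emitting reload insns, a caller that needs the
   value an address will have after substitution might write, roughly,

	rtx addr = find_replacement (&XEXP (mem, 0));

   which yields the scheduled reload register (adjusted to the recorded mode,
   or rewrapped in a SUBREG or PLUS/MINUS/MULT as above) if a replacement is
   pending for that location, and *LOC unchanged otherwise.  MEM here stands
   for whatever memory reference the caller is working on.  */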
6392 \f
6393 /* Return nonzero if a register in the range [REGNO, ENDREGNO)
6394 appears either explicitly or implicitly in X
6395 other than being stored into (except for earlyclobber operands).
6396
6397 References contained within the substructure at LOC do not count.
6398 LOC may be zero, meaning don't ignore anything.
6399
6400 This is similar to refers_to_regno_p in rtlanal.c except that we
6401 look at equivalences for pseudos that didn't get hard registers. */
6402
6403 static int
6404 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6405 rtx x, rtx *loc)
6406 {
6407 int i;
6408 unsigned int r;
6409 RTX_CODE code;
6410 const char *fmt;
6411
6412 if (x == 0)
6413 return 0;
6414
6415 repeat:
6416 code = GET_CODE (x);
6417
6418 switch (code)
6419 {
6420 case REG:
6421 r = REGNO (x);
6422
6423 /* If this is a pseudo, a hard register must not have been allocated.
6424 X must therefore either be a constant or be in memory. */
6425 if (r >= FIRST_PSEUDO_REGISTER)
6426 {
6427 if (reg_equiv_memory_loc (r))
6428 return refers_to_regno_for_reload_p (regno, endregno,
6429 reg_equiv_memory_loc (r),
6430 (rtx*) 0);
6431
6432 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6433 return 0;
6434 }
6435
6436 return endregno > r && regno < END_REGNO (x);
6437
6438 case SUBREG:
6439 /* If this is a SUBREG of a hard reg, we can see exactly which
6440 registers are being modified. Otherwise, handle normally. */
6441 if (REG_P (SUBREG_REG (x))
6442 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6443 {
6444 unsigned int inner_regno = subreg_regno (x);
6445 unsigned int inner_endregno
6446 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6447 ? subreg_nregs (x) : 1);
6448
6449 return endregno > inner_regno && regno < inner_endregno;
6450 }
6451 break;
6452
6453 case CLOBBER:
6454 case SET:
6455 if (&SET_DEST (x) != loc
6456 /* Note that setting a SUBREG counts as referring to the REG it is in
6457 for a pseudo, but not for hard registers, since we can
6458 treat each word individually. */
6459 && ((GET_CODE (SET_DEST (x)) == SUBREG
6460 && loc != &SUBREG_REG (SET_DEST (x))
6461 && REG_P (SUBREG_REG (SET_DEST (x)))
6462 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6463 && refers_to_regno_for_reload_p (regno, endregno,
6464 SUBREG_REG (SET_DEST (x)),
6465 loc))
6466 /* If the output is an earlyclobber operand, this is
6467 a conflict. */
6468 || ((!REG_P (SET_DEST (x))
6469 || earlyclobber_operand_p (SET_DEST (x)))
6470 && refers_to_regno_for_reload_p (regno, endregno,
6471 SET_DEST (x), loc))))
6472 return 1;
6473
6474 if (code == CLOBBER || loc == &SET_SRC (x))
6475 return 0;
6476 x = SET_SRC (x);
6477 goto repeat;
6478
6479 default:
6480 break;
6481 }
6482
6483 /* X does not match, so try its subexpressions. */
6484
6485 fmt = GET_RTX_FORMAT (code);
6486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6487 {
6488 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6489 {
6490 if (i == 0)
6491 {
6492 x = XEXP (x, 0);
6493 goto repeat;
6494 }
6495 else
6496 if (refers_to_regno_for_reload_p (regno, endregno,
6497 XEXP (x, i), loc))
6498 return 1;
6499 }
6500 else if (fmt[i] == 'E')
6501 {
6502 int j;
6503 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6504 if (loc != &XVECEXP (x, i, j)
6505 && refers_to_regno_for_reload_p (regno, endregno,
6506 XVECEXP (x, i, j), loc))
6507 return 1;
6508 }
6509 }
6510 return 0;
6511 }
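
/* For illustration: a typical query within this file asks whether hard regs
   [R, R + N) are used anywhere in IN other than being stored into, e.g.

	if (refers_to_regno_for_reload_p (r, end_hard_regno (mode, r),
					  in, (rtx *) 0))
	  ...

   Unlike refers_to_regno_p, a pseudo that received no hard register is
   chased through reg_equiv_memory_loc, so a conflict hidden inside its
   memory equivalence is still reported.  */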
6512
6513 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6514 we check if any register number in X conflicts with the relevant register
6515 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6516 contains a MEM (we don't bother checking for memory addresses that can't
6517 conflict because we expect this to be a rare case).
6518
6519 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6520 that we look at equivalences for pseudos that didn't get hard registers. */
6521
6522 int
6523 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6524 {
6525 int regno, endregno;
6526
6527 /* Overly conservative. */
6528 if (GET_CODE (x) == STRICT_LOW_PART
6529 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6530 x = XEXP (x, 0);
6531
6532 /* If either argument is a constant, then modifying X cannot affect IN. */
6533 if (CONSTANT_P (x) || CONSTANT_P (in))
6534 return 0;
6535 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6536 return refers_to_mem_for_reload_p (in);
6537 else if (GET_CODE (x) == SUBREG)
6538 {
6539 regno = REGNO (SUBREG_REG (x));
6540 if (regno < FIRST_PSEUDO_REGISTER)
6541 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6542 GET_MODE (SUBREG_REG (x)),
6543 SUBREG_BYTE (x),
6544 GET_MODE (x));
6545 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6546 ? subreg_nregs (x) : 1);
6547
6548 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6549 }
6550 else if (REG_P (x))
6551 {
6552 regno = REGNO (x);
6553
6554 /* If this is a pseudo, it must not have been assigned a hard register.
6555 Therefore, it must either be in memory or be a constant. */
6556
6557 if (regno >= FIRST_PSEUDO_REGISTER)
6558 {
6559 if (reg_equiv_memory_loc (regno))
6560 return refers_to_mem_for_reload_p (in);
6561 gcc_assert (reg_equiv_constant (regno));
6562 return 0;
6563 }
6564
6565 endregno = END_REGNO (x);
6566
6567 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6568 }
6569 else if (MEM_P (x))
6570 return refers_to_mem_for_reload_p (in);
6571 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6572 || GET_CODE (x) == CC0)
6573 return reg_mentioned_p (x, in);
6574 else
6575 {
6576 gcc_assert (GET_CODE (x) == PLUS);
6577
6578 /* We actually want to know if X is mentioned somewhere inside IN.
6579 We must not say that (plus (sp) (const_int 124)) is in
6580 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6581 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6582 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6583 while (MEM_P (in))
6584 in = XEXP (in, 0);
6585 if (REG_P (in))
6586 return 0;
6587 else if (GET_CODE (in) == PLUS)
6588 return (rtx_equal_p (x, in)
6589 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6590 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6591 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6592 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6593 }
6594
6595 gcc_unreachable ();
6596 }
6597
6598 /* Return nonzero if anything in X contains a MEM. Pseudo registers whose
6599 equivalent location is a MEM also count. */
6600
6601 static int
6602 refers_to_mem_for_reload_p (rtx x)
6603 {
6604 const char *fmt;
6605 int i;
6606
6607 if (MEM_P (x))
6608 return 1;
6609
6610 if (REG_P (x))
6611 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6612 && reg_equiv_memory_loc (REGNO (x)));
6613
6614 fmt = GET_RTX_FORMAT (GET_CODE (x));
6615 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6616 if (fmt[i] == 'e'
6617 && (MEM_P (XEXP (x, i))
6618 || refers_to_mem_for_reload_p (XEXP (x, i))))
6619 return 1;
6620
6621 return 0;
6622 }
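
/* For illustration: a pseudo that got no hard register counts as a MEM here
   when its reg_equiv_memory_loc is set, so for X = (reg:SI 200), with pseudo
   200 living in its stack slot, the result is nonzero even though no explicit
   MEM appears in X.  Pseudo 200 is just an arbitrary example number.  */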
6623 \f
6624 /* Check the insns before INSN to see if there is a suitable register
6625 containing the same value as GOAL.
6626 If OTHER is -1, look for a register in class RCLASS.
6627 Otherwise, just see if register number OTHER shares GOAL's value.
6628
6629 Return an rtx for the register found, or zero if none is found.
6630
6631 If RELOAD_REG_P is (short *)1,
6632 we reject any hard reg that appears in reload_reg_rtx
6633 because such a hard reg is also needed coming into this insn.
6634
6635 If RELOAD_REG_P is any other nonzero value,
6636 it is a vector indexed by hard reg number
6637 and we reject any hard reg whose element in the vector is nonnegative
6638 as well as any that appears in reload_reg_rtx.
6639
6640 If GOAL is zero, then GOALREG is a register number; we look
6641 for an equivalent for that register.
6642
6643 MODE is the machine mode of the value we want an equivalence for.
6644 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6645
6646 This function is used by jump.c as well as in the reload pass.
6647
6648 If GOAL is the sum of the stack pointer and a constant, we treat it
6649 as if it were a constant except that sp is required to be unchanging. */
6650
6651 rtx
6652 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6653 short *reload_reg_p, int goalreg, machine_mode mode)
6654 {
6655 rtx_insn *p = insn;
6656 rtx goaltry, valtry, value;
6657 rtx_insn *where;
6658 rtx pat;
6659 int regno = -1;
6660 int valueno;
6661 int goal_mem = 0;
6662 int goal_const = 0;
6663 int goal_mem_addr_varies = 0;
6664 int need_stable_sp = 0;
6665 int nregs;
6666 int valuenregs;
6667 int num = 0;
6668
6669 if (goal == 0)
6670 regno = goalreg;
6671 else if (REG_P (goal))
6672 regno = REGNO (goal);
6673 else if (MEM_P (goal))
6674 {
6675 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6676 if (MEM_VOLATILE_P (goal))
6677 return 0;
6678 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6679 return 0;
6680 /* An address with side effects must be reexecuted. */
6681 switch (code)
6682 {
6683 case POST_INC:
6684 case PRE_INC:
6685 case POST_DEC:
6686 case PRE_DEC:
6687 case POST_MODIFY:
6688 case PRE_MODIFY:
6689 return 0;
6690 default:
6691 break;
6692 }
6693 goal_mem = 1;
6694 }
6695 else if (CONSTANT_P (goal))
6696 goal_const = 1;
6697 else if (GET_CODE (goal) == PLUS
6698 && XEXP (goal, 0) == stack_pointer_rtx
6699 && CONSTANT_P (XEXP (goal, 1)))
6700 goal_const = need_stable_sp = 1;
6701 else if (GET_CODE (goal) == PLUS
6702 && XEXP (goal, 0) == frame_pointer_rtx
6703 && CONSTANT_P (XEXP (goal, 1)))
6704 goal_const = 1;
6705 else
6706 return 0;
6707
6708 num = 0;
6709 /* Scan insns back from INSN, looking for one that copies
6710 a value into or out of GOAL.
6711 Stop and give up if we reach a label. */
6712
6713 while (1)
6714 {
6715 p = PREV_INSN (p);
6716 if (p && DEBUG_INSN_P (p))
6717 continue;
6718 num++;
6719 if (p == 0 || LABEL_P (p)
6720 || num > param_max_reload_search_insns)
6721 return 0;
6722
6723 /* Don't reuse register contents from before a setjmp-type
6724 function call; on the second return (from the longjmp) it
6725 might have been clobbered by a later reuse. It doesn't
6726 seem worthwhile to check whether it is actually reused,
6727 even if that information were readily available;
6728 just don't reuse it across the setjmp call. */
6729 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6730 return 0;
6731
6732 if (NONJUMP_INSN_P (p)
6733 /* If we don't want spill regs ... */
6734 && (! (reload_reg_p != 0
6735 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6736 /* ... then ignore insns introduced by reload; they aren't
6737 useful and can cause results in reload_as_needed to be
6738 different from what they were when calculating the need for
6739 spills. If we notice an input-reload insn here, we will
6740 reject it below, but it might hide a usable equivalent.
6741 That makes bad code. It may even fail: perhaps no reg was
6742 spilled for this insn because it was assumed we would find
6743 that equivalent. */
6744 || INSN_UID (p) < reload_first_uid))
6745 {
6746 rtx tem;
6747 pat = single_set (p);
6748
6749 /* First check for something that sets some reg equal to GOAL. */
6750 if (pat != 0
6751 && ((regno >= 0
6752 && true_regnum (SET_SRC (pat)) == regno
6753 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6754 ||
6755 (regno >= 0
6756 && true_regnum (SET_DEST (pat)) == regno
6757 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6758 ||
6759 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6760 /* When looking for stack pointer + const,
6761 make sure we don't use a stack adjust. */
6762 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6763 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6764 || (goal_mem
6765 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6766 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6767 || (goal_mem
6768 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6769 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6770 /* If we are looking for a constant,
6771 and something equivalent to that constant was copied
6772 into a reg, we can use that reg. */
6773 || (goal_const && REG_NOTES (p) != 0
6774 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6775 && ((rtx_equal_p (XEXP (tem, 0), goal)
6776 && (valueno
6777 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6778 || (REG_P (SET_DEST (pat))
6779 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6780 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6781 && CONST_INT_P (goal)
6782 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6783 0, VOIDmode)) != 0
6784 && rtx_equal_p (goal, goaltry)
6785 && (valtry
6786 = operand_subword (SET_DEST (pat), 0, 0,
6787 VOIDmode))
6788 && (valueno = true_regnum (valtry)) >= 0)))
6789 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6790 NULL_RTX))
6791 && REG_P (SET_DEST (pat))
6792 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6793 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6794 && CONST_INT_P (goal)
6795 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6796 VOIDmode)) != 0
6797 && rtx_equal_p (goal, goaltry)
6798 && (valtry
6799 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6800 && (valueno = true_regnum (valtry)) >= 0)))
6801 {
6802 if (other >= 0)
6803 {
6804 if (valueno != other)
6805 continue;
6806 }
6807 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6808 continue;
6809 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6810 mode, valueno))
6811 continue;
6812 value = valtry;
6813 where = p;
6814 break;
6815 }
6816 }
6817 }
6818
6819 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6820 (or copying VALUE into GOAL, if GOAL is also a register).
6821 Now verify that VALUE is really valid. */
6822
6823 /* VALUENO is the register number of VALUE; a hard register. */
6824
6825 /* Don't try to re-use something that is killed in this insn. We want
6826 to be able to trust REG_UNUSED notes. */
6827 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6828 return 0;
6829
6830 /* If we propose to get the value from the stack pointer or if GOAL is
6831 a MEM based on the stack pointer, we need a stable SP. */
6832 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6833 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6834 goal)))
6835 need_stable_sp = 1;
6836
6837 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6838 if (GET_MODE (value) != mode)
6839 return 0;
6840
6841 /* Reject VALUE if it was loaded from GOAL
6842 and is also a register that appears in the address of GOAL. */
6843
6844 if (goal_mem && value == SET_DEST (single_set (where))
6845 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6846 goal, (rtx*) 0))
6847 return 0;
6848
6849 /* Reject registers that overlap GOAL. */
6850
6851 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6852 nregs = hard_regno_nregs (regno, mode);
6853 else
6854 nregs = 1;
6855 valuenregs = hard_regno_nregs (valueno, mode);
6856
6857 if (!goal_mem && !goal_const
6858 && regno + nregs > valueno && regno < valueno + valuenregs)
6859 return 0;
6860
6861 /* Reject VALUE if it is one of the regs reserved for reloads.
6862 Reload1 knows how to reuse them anyway, and it would get
6863 confused if we allocated one without its knowledge.
6864 (Now that insns introduced by reload are ignored above,
6865 this case shouldn't happen, but I'm not positive.) */
6866
6867 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6868 {
6869 int i;
6870 for (i = 0; i < valuenregs; ++i)
6871 if (reload_reg_p[valueno + i] >= 0)
6872 return 0;
6873 }
6874
6875 /* Reject VALUE if it is a register being used for an input reload
6876 even if it is not one of those reserved. */
6877
6878 if (reload_reg_p != 0)
6879 {
6880 int i;
6881 for (i = 0; i < n_reloads; i++)
6882 if (rld[i].reg_rtx != 0
6883 && rld[i].in
6884 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6885 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6886 return 0;
6887 }
6888
6889 if (goal_mem)
6890 /* We must treat the frame pointer as varying here,
6891 since it can vary, e.g. in a nonlocal goto as generated by expand_goto. */
6892 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6893
6894 /* Now verify that the values of GOAL and VALUE remain unaltered
6895 until INSN is reached. */
6896
6897 p = insn;
6898 while (1)
6899 {
6900 p = PREV_INSN (p);
6901 if (p == where)
6902 return value;
6903
6904 /* Don't trust the conversion past a function call
6905 if either of the two is in a call-clobbered register, or memory. */
6906 if (CALL_P (p))
6907 {
6908 if (goal_mem || need_stable_sp)
6909 return 0;
6910
6911 function_abi callee_abi = insn_callee_abi (p);
6912 if (regno >= 0
6913 && regno < FIRST_PSEUDO_REGISTER
6914 && callee_abi.clobbers_reg_p (mode, regno))
6915 return 0;
6916
6917 if (valueno >= 0
6918 && valueno < FIRST_PSEUDO_REGISTER
6919 && callee_abi.clobbers_reg_p (mode, valueno))
6920 return 0;
6921 }
6922
6923 if (INSN_P (p))
6924 {
6925 pat = PATTERN (p);
6926
6927 /* Watch out for unspec_volatile, and volatile asms. */
6928 if (volatile_insn_p (pat))
6929 return 0;
6930
6931 /* If this insn P stores in either GOAL or VALUE, return 0.
6932 If GOAL is a memory ref and this insn writes memory, return 0.
6933 If GOAL is a memory ref and its address is not constant,
6934 and this insn P changes a register used in GOAL, return 0. */
6935
6936 if (GET_CODE (pat) == COND_EXEC)
6937 pat = COND_EXEC_CODE (pat);
6938 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6939 {
6940 rtx dest = SET_DEST (pat);
6941 while (GET_CODE (dest) == SUBREG
6942 || GET_CODE (dest) == ZERO_EXTRACT
6943 || GET_CODE (dest) == STRICT_LOW_PART)
6944 dest = XEXP (dest, 0);
6945 if (REG_P (dest))
6946 {
6947 int xregno = REGNO (dest);
6948 int end_xregno = END_REGNO (dest);
6949 if (xregno < regno + nregs && end_xregno > regno)
6950 return 0;
6951 if (xregno < valueno + valuenregs
6952 && end_xregno > valueno)
6953 return 0;
6954 if (goal_mem_addr_varies
6955 && reg_overlap_mentioned_for_reload_p (dest, goal))
6956 return 0;
6957 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6958 return 0;
6959 }
6960 else if (goal_mem && MEM_P (dest)
6961 && ! push_operand (dest, GET_MODE (dest)))
6962 return 0;
6963 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6964 && reg_equiv_memory_loc (regno) != 0)
6965 return 0;
6966 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6967 return 0;
6968 }
6969 else if (GET_CODE (pat) == PARALLEL)
6970 {
6971 int i;
6972 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6973 {
6974 rtx v1 = XVECEXP (pat, 0, i);
6975 if (GET_CODE (v1) == COND_EXEC)
6976 v1 = COND_EXEC_CODE (v1);
6977 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6978 {
6979 rtx dest = SET_DEST (v1);
6980 while (GET_CODE (dest) == SUBREG
6981 || GET_CODE (dest) == ZERO_EXTRACT
6982 || GET_CODE (dest) == STRICT_LOW_PART)
6983 dest = XEXP (dest, 0);
6984 if (REG_P (dest))
6985 {
6986 int xregno = REGNO (dest);
6987 int end_xregno = END_REGNO (dest);
6988 if (xregno < regno + nregs
6989 && end_xregno > regno)
6990 return 0;
6991 if (xregno < valueno + valuenregs
6992 && end_xregno > valueno)
6993 return 0;
6994 if (goal_mem_addr_varies
6995 && reg_overlap_mentioned_for_reload_p (dest,
6996 goal))
6997 return 0;
6998 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6999 return 0;
7000 }
7001 else if (goal_mem && MEM_P (dest)
7002 && ! push_operand (dest, GET_MODE (dest)))
7003 return 0;
7004 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7005 && reg_equiv_memory_loc (regno) != 0)
7006 return 0;
7007 else if (need_stable_sp
7008 && push_operand (dest, GET_MODE (dest)))
7009 return 0;
7010 }
7011 }
7012 }
7013
7014 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7015 {
7016 rtx link;
7017
7018 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7019 link = XEXP (link, 1))
7020 {
7021 pat = XEXP (link, 0);
7022 if (GET_CODE (pat) == CLOBBER)
7023 {
7024 rtx dest = SET_DEST (pat);
7025
7026 if (REG_P (dest))
7027 {
7028 int xregno = REGNO (dest);
7029 int end_xregno = END_REGNO (dest);
7030
7031 if (xregno < regno + nregs
7032 && end_xregno > regno)
7033 return 0;
7034 else if (xregno < valueno + valuenregs
7035 && end_xregno > valueno)
7036 return 0;
7037 else if (goal_mem_addr_varies
7038 && reg_overlap_mentioned_for_reload_p (dest,
7039 goal))
7040 return 0;
7041 }
7042
7043 else if (goal_mem && MEM_P (dest)
7044 && ! push_operand (dest, GET_MODE (dest)))
7045 return 0;
7046 else if (need_stable_sp
7047 && push_operand (dest, GET_MODE (dest)))
7048 return 0;
7049 }
7050 }
7051 }
7052
7053 #if AUTO_INC_DEC
7054 /* If this insn auto-increments or auto-decrements
7055 either regno or valueno, return 0 now.
7056 If GOAL is a memory ref and its address is not constant,
7057 and this insn P increments a register used in GOAL, return 0. */
7058 {
7059 rtx link;
7060
7061 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7062 if (REG_NOTE_KIND (link) == REG_INC
7063 && REG_P (XEXP (link, 0)))
7064 {
7065 int incno = REGNO (XEXP (link, 0));
7066 if (incno < regno + nregs && incno >= regno)
7067 return 0;
7068 if (incno < valueno + valuenregs && incno >= valueno)
7069 return 0;
7070 if (goal_mem_addr_varies
7071 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7072 goal))
7073 return 0;
7074 }
7075 }
7076 #endif
7077 }
7078 }
7079 }
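
/* For illustration only (a sketch, not a quote of an actual caller): the
   reload pass might ask for an existing copy of an input value roughly as

	rtx equiv = find_equiv_reg (rld[r].in, insn, rld[r].rclass, -1,
				    (short *) 0, 0, rld[r].inmode);

   A nonzero result names a hard register that provably still holds the
   wanted value when control reaches INSN, subject to all the checks above
   (no intervening store, call clobber, auto-increment, or stack adjust).  */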
7080 \f
7081 /* Find a place where INCED appears in an increment or decrement operator
7082 within X, and return the amount INCED is incremented or decremented by.
7083 The value is always positive. */
7084
7085 static poly_int64
7086 find_inc_amount (rtx x, rtx inced)
7087 {
7088 enum rtx_code code = GET_CODE (x);
7089 const char *fmt;
7090 int i;
7091
7092 if (code == MEM)
7093 {
7094 rtx addr = XEXP (x, 0);
7095 if ((GET_CODE (addr) == PRE_DEC
7096 || GET_CODE (addr) == POST_DEC
7097 || GET_CODE (addr) == PRE_INC
7098 || GET_CODE (addr) == POST_INC)
7099 && XEXP (addr, 0) == inced)
7100 return GET_MODE_SIZE (GET_MODE (x));
7101 else if ((GET_CODE (addr) == PRE_MODIFY
7102 || GET_CODE (addr) == POST_MODIFY)
7103 && GET_CODE (XEXP (addr, 1)) == PLUS
7104 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7105 && XEXP (addr, 0) == inced
7106 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7107 {
7108 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7109 return i < 0 ? -i : i;
7110 }
7111 }
7112
7113 fmt = GET_RTX_FORMAT (code);
7114 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7115 {
7116 if (fmt[i] == 'e')
7117 {
7118 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7119 if (maybe_ne (tem, 0))
7120 return tem;
7121 }
7122 if (fmt[i] == 'E')
7123 {
7124 int j;
7125 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7126 {
7127 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7128 if (maybe_ne (tem, 0))
7129 return tem;
7130 }
7131 }
7132 }
7133
7134 return 0;
7135 }
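
/* Worked example: for (mem:SI (post_inc:SI (reg:SI 3))) with INCED being
   (reg:SI 3), the result is 4, the size of SImode.  For
   (mem:SI (pre_modify:SI (reg:SI 3) (plus:SI (reg:SI 3) (const_int -8))))
   the result is 8, since the amount is always reported as positive.  */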
7136 \f
7137 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7138 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7139
7140 static int
7141 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7142 rtx insn)
7143 {
7144 rtx link;
7145
7146 if (!AUTO_INC_DEC)
7147 return 0;
7148
7149 gcc_assert (insn);
7150
7151 if (! INSN_P (insn))
7152 return 0;
7153
7154 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7155 if (REG_NOTE_KIND (link) == REG_INC)
7156 {
7157 unsigned int test = (int) REGNO (XEXP (link, 0));
7158 if (test >= regno && test < endregno)
7159 return 1;
7160 }
7161 return 0;
7162 }
7163
7164 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7165 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7166 REG_INC. REGNO must refer to a hard register. */
7167
7168 int
7169 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7170 int sets)
7171 {
7172 /* regno must be a hard register. */
7173 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7174
7175 unsigned int endregno = end_hard_regno (mode, regno);
7176
7177 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7178 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7179 && REG_P (XEXP (PATTERN (insn), 0)))
7180 {
7181 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7182
7183 return test >= regno && test < endregno;
7184 }
7185
7186 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7187 return 1;
7188
7189 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7190 {
7191 int i = XVECLEN (PATTERN (insn), 0) - 1;
7192
7193 for (; i >= 0; i--)
7194 {
7195 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7196 if ((GET_CODE (elt) == CLOBBER
7197 || (sets == 1 && GET_CODE (elt) == SET))
7198 && REG_P (XEXP (elt, 0)))
7199 {
7200 unsigned int test = REGNO (XEXP (elt, 0));
7201
7202 if (test >= regno && test < endregno)
7203 return 1;
7204 }
7205 if (sets == 2
7206 && reg_inc_found_and_valid_p (regno, endregno, elt))
7207 return 1;
7208 }
7209 }
7210
7211 return 0;
7212 }
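
/* For illustration: regno_clobbered_p (3, insn, SImode, 1) reports whether
   any hard reg in [3, 3 + hard_regno_nregs (3, SImode)) is the destination
   of a top-level (or PARALLEL-member) SET or CLOBBER in INSN.  With
   SETS == 2, plain SETs are not counted, but a REG_INC note on one of those
   registers is.  Register 3 is just an arbitrary example.  */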
7213
7214 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
7215 rtx
7216 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7217 {
7218 int regno;
7219
7220 if (GET_MODE (reloadreg) == mode)
7221 return reloadreg;
7222
7223 regno = REGNO (reloadreg);
7224
7225 if (REG_WORDS_BIG_ENDIAN)
7226 regno += ((int) REG_NREGS (reloadreg)
7227 - (int) hard_regno_nregs (regno, mode));
7228
7229 return gen_rtx_REG (mode, regno);
7230 }
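
/* Example (assuming a target where DImode occupies two word-sized registers):
   taking the SImode low part of (reg:DI 10) yields (reg:SI 11) when
   REG_WORDS_BIG_ENDIAN, because the least significant word lives in the
   higher-numbered register, and (reg:SI 10) otherwise.  */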
7231
7232 static const char *const reload_when_needed_name[] =
7233 {
7234 "RELOAD_FOR_INPUT",
7235 "RELOAD_FOR_OUTPUT",
7236 "RELOAD_FOR_INSN",
7237 "RELOAD_FOR_INPUT_ADDRESS",
7238 "RELOAD_FOR_INPADDR_ADDRESS",
7239 "RELOAD_FOR_OUTPUT_ADDRESS",
7240 "RELOAD_FOR_OUTADDR_ADDRESS",
7241 "RELOAD_FOR_OPERAND_ADDRESS",
7242 "RELOAD_FOR_OPADDR_ADDR",
7243 "RELOAD_OTHER",
7244 "RELOAD_FOR_OTHER_ADDRESS"
7245 };
7246
7247 /* These functions are used to print the variables set by 'find_reloads'. */
7248
7249 DEBUG_FUNCTION void
7250 debug_reload_to_stream (FILE *f)
7251 {
7252 int r;
7253 const char *prefix;
7254
7255 if (! f)
7256 f = stderr;
7257 for (r = 0; r < n_reloads; r++)
7258 {
7259 fprintf (f, "Reload %d: ", r);
7260
7261 if (rld[r].in != 0)
7262 {
7263 fprintf (f, "reload_in (%s) = ",
7264 GET_MODE_NAME (rld[r].inmode));
7265 print_inline_rtx (f, rld[r].in, 24);
7266 fprintf (f, "\n\t");
7267 }
7268
7269 if (rld[r].out != 0)
7270 {
7271 fprintf (f, "reload_out (%s) = ",
7272 GET_MODE_NAME (rld[r].outmode));
7273 print_inline_rtx (f, rld[r].out, 24);
7274 fprintf (f, "\n\t");
7275 }
7276
7277 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7278
7279 fprintf (f, "%s (opnum = %d)",
7280 reload_when_needed_name[(int) rld[r].when_needed],
7281 rld[r].opnum);
7282
7283 if (rld[r].optional)
7284 fprintf (f, ", optional");
7285
7286 if (rld[r].nongroup)
7287 fprintf (f, ", nongroup");
7288
7289 if (maybe_ne (rld[r].inc, 0))
7290 {
7291 fprintf (f, ", inc by ");
7292 print_dec (rld[r].inc, f, SIGNED);
7293 }
7294
7295 if (rld[r].nocombine)
7296 fprintf (f, ", can't combine");
7297
7298 if (rld[r].secondary_p)
7299 fprintf (f, ", secondary_reload_p");
7300
7301 if (rld[r].in_reg != 0)
7302 {
7303 fprintf (f, "\n\treload_in_reg: ");
7304 print_inline_rtx (f, rld[r].in_reg, 24);
7305 }
7306
7307 if (rld[r].out_reg != 0)
7308 {
7309 fprintf (f, "\n\treload_out_reg: ");
7310 print_inline_rtx (f, rld[r].out_reg, 24);
7311 }
7312
7313 if (rld[r].reg_rtx != 0)
7314 {
7315 fprintf (f, "\n\treload_reg_rtx: ");
7316 print_inline_rtx (f, rld[r].reg_rtx, 24);
7317 }
7318
7319 prefix = "\n\t";
7320 if (rld[r].secondary_in_reload != -1)
7321 {
7322 fprintf (f, "%ssecondary_in_reload = %d",
7323 prefix, rld[r].secondary_in_reload);
7324 prefix = ", ";
7325 }
7326
7327 if (rld[r].secondary_out_reload != -1)
7328 fprintf (f, "%ssecondary_out_reload = %d\n",
7329 prefix, rld[r].secondary_out_reload);
7330
7331 prefix = "\n\t";
7332 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7333 {
7334 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7335 insn_data[rld[r].secondary_in_icode].name);
7336 prefix = ", ";
7337 }
7338
7339 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7340 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7341 insn_data[rld[r].secondary_out_icode].name);
7342
7343 fprintf (f, "\n");
7344 }
7345 }
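
/* For illustration, a single entry printed by the loop above looks roughly
   like the following (the exact text depends on the reload being described):

	Reload 0: reload_in (SI) = (reg/v:SI 132)
		GENERAL_REGS, RELOAD_FOR_INPUT (opnum = 1)
		reload_in_reg: (reg/v:SI 132)  */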
7346
7347 DEBUG_FUNCTION void
7348 debug_reload (void)
7349 {
7350 debug_reload_to_stream (stderr);
7351 }