1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
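
/* Illustrative sketch only: the calling sequence described above, roughly
   as a caller such as reload1.c uses it.  The exact argument values are
   assumptions for the example, not requirements:

     find_reloads (insn, replace, ind_levels, live_known, spill_reg_p);
       ... choose a hard register for each entry of rld[], set
           rld[r].reg_rtx, and emit any load/store insns around INSN ...
     subst_reloads (insn);  */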
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "params.h"
109
110 /* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112 #define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
117
118 /* True if RCLASS is a non-empty register class that has too few registers
119 to be safely used as a reload target class. */
120
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
123 {
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
127 }
128
129 \f
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
134
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
139
140 int reload_n_operands;
141
142 /* Replacing reloads.
143
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
148
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
151
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
154 {
155 rtx *where; /* Location to store in. */
156 int what; /* Which reload this is for. */
157 machine_mode mode; /* Mode it must have. */
158 };
159
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
161
162 /* Number of replacements currently recorded. */
163 static int n_replacements;
164
165 /* Used to track what is modified by an operand. */
166 struct decomposition
167 {
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 poly_int64_pod start; /* Starting offset or register number. */
172 poly_int64_pod end; /* Ending offset or register number. */
173 };
174
175 /* Save MEMs needed to copy from one class of registers to another. One MEM
176 is used per mode, but normally only one or two modes are ever used.
177
178 We keep two versions, before and after register elimination. The one
179 after register elimination is recorded separately for each operand. This
180 is done in case the address is not valid, to be sure that we reload
181 each one separately. */
182
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
186
187 /* The instruction we are doing reloads for;
188 so we can test whether a register dies in it. */
189 static rtx_insn *this_insn;
190
191 /* Nonzero if this instruction is a user-specified asm with operands. */
192 static int this_insn_is_asm;
193
194 /* If hard_regs_live_known is nonzero,
195 we can tell which hard regs are currently live,
196 at least enough to succeed in choosing dummy reloads. */
197 static int hard_regs_live_known;
198
199 /* Indexed by hard reg number,
200 element is nonnegative if hard reg has been spilled.
201 This vector is passed to `find_reloads' as an argument
202 and is not changed here. */
203 static short *static_reload_reg_p;
204
205 /* Set to 1 in subst_reg_equivs if it changes anything. */
206 static int subst_reg_equivs_changed;
207
208 /* On return from push_reload, holds the reload-number for the OUT
209 operand, which can be different from the reload-number for the IN operand. */
210 static int output_reloadnum;
211
212 /* Compare two RTX's. */
213 #define MATCHES(x, y) \
214 (x == y || (x != 0 && (REG_P (x) \
215 ? REG_P (y) && REGNO (x) == REGNO (y) \
216 : rtx_equal_p (x, y) && ! side_effects_p (x))))
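
/* For example (illustrative only), MATCHES considers two distinct REG
   rtxes for hard register 3 to match, since only the register number is
   compared; two equal MEMs match only if they have no side effects.  */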
217
218 /* Indicates whether the purposes of two reloads are similar enough that we
219 can merge them. */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
222 || ((when1) == (when2) && (op1) == (op2)) \
223 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
225 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
226 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
227 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
228
229 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231 ((when1) != (when2) \
232 || ! ((op1) == (op2) \
233 || (when1) == RELOAD_FOR_INPUT \
234 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
236
237 /* If we are going to reload an address, compute the reload type to
238 use. */
239 #define ADDR_TYPE(type) \
240 ((type) == RELOAD_FOR_INPUT_ADDRESS \
241 ? RELOAD_FOR_INPADDR_ADDRESS \
242 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
243 ? RELOAD_FOR_OUTADDR_ADDRESS \
244 : (type)))
245
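/* Worked example of the three macros above (illustrative only):

     MERGABLE_RELOADS (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)
       is true: two input reloads may always share a reload register.
     MERGE_TO_OTHER (RELOAD_FOR_INPUT, RELOAD_FOR_INPUT, 0, 1)
       is false: the merged reload stays RELOAD_FOR_INPUT.
     MERGABLE_RELOADS (RELOAD_OTHER, RELOAD_FOR_INPUT, 0, 1)
       is true, and MERGE_TO_OTHER for the same arguments is also true,
       so the merged reload becomes RELOAD_OTHER.
     ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) is RELOAD_FOR_INPADDR_ADDRESS.  */
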
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 machine_mode, enum reload_type,
248 enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
266 addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 enum rtx_code, enum rtx_code, rtx *,
274 int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 machine_mode, int,
277 enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 rtx, rtx *);
285
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
287 list yet. */
288
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
291 {
292 rtx it;
293
294 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295 if (rtx_equal_p (XEXP (it, 0), mem))
296 return;
297
298 reg_equiv_alt_mem_list (regno)
299 = alloc_EXPR_LIST (REG_EQUIV, mem,
300 reg_equiv_alt_mem_list (regno));
301 }
302 \f
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304 nonzero) or storing (if IN_P is zero) X to or from a reload register of
305 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
306 are needed, push them.
307
308 Return the reload number of the secondary reload we made, or -1 if
309 we didn't need one. *PICODE is set to the insn_code to use if we do
310 need a secondary reload. */
311
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 enum reg_class reload_class,
315 machine_mode reload_mode, enum reload_type type,
316 enum insn_code *picode, secondary_reload_info *prev_sri)
317 {
318 enum reg_class rclass = NO_REGS;
319 enum reg_class scratch_class;
320 machine_mode mode = reload_mode;
321 enum insn_code icode = CODE_FOR_nothing;
322 enum insn_code t_icode = CODE_FOR_nothing;
323 enum reload_type secondary_type;
324 int s_reload, t_reload = -1;
325 const char *scratch_constraint;
326 secondary_reload_info sri;
327
328 if (type == RELOAD_FOR_INPUT_ADDRESS
329 || type == RELOAD_FOR_OUTPUT_ADDRESS
330 || type == RELOAD_FOR_INPADDR_ADDRESS
331 || type == RELOAD_FOR_OUTADDR_ADDRESS)
332 secondary_type = type;
333 else
334 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
335
336 *picode = CODE_FOR_nothing;
337
338 /* If X is a paradoxical SUBREG, use the inner value to determine both the
339 mode and object being reloaded. */
340 if (paradoxical_subreg_p (x))
341 {
342 x = SUBREG_REG (x);
343 reload_mode = GET_MODE (x);
344 }
345
346 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347 is still a pseudo-register by now, it *must* have an equivalent MEM
348 but we don't want to assume that), use that equivalent when seeing if
349 a secondary reload is needed since whether or not a reload is needed
350 might be sensitive to the form of the MEM. */
351
352 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353 && reg_equiv_mem (REGNO (x)))
354 x = reg_equiv_mem (REGNO (x));
355
356 sri.icode = CODE_FOR_nothing;
357 sri.prev_sri = prev_sri;
358 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 reload_mode, &sri);
360 icode = (enum insn_code) sri.icode;
361
362 /* If we don't need any secondary registers, done. */
363 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364 return -1;
365
366 if (rclass != NO_REGS)
367 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 reload_mode, type, &t_icode, &sri);
369
370 /* If we will be using an insn, the secondary reload is for a
371 scratch register. */
372
373 if (icode != CODE_FOR_nothing)
374 {
375 /* If IN_P is nonzero, the reload register will be the output in
376 operand 0. If IN_P is zero, the reload register will be the input
377 in operand 1. Outputs should have an initial "=", which we must
378 skip. */
379
380 /* ??? It would be useful to be able to handle only two, or more than
381 three, operands, but for now we can only handle the case of having
382 exactly three: output, input and one temp/scratch. */
383 gcc_assert (insn_data[(int) icode].n_operands == 3);
384
385 /* ??? We currently have no way to represent a reload that needs
386 an icode to reload from an intermediate tertiary reload register.
387 We should probably have a new field in struct reload to tag a
388 chain of scratch operand reloads onto. */
389 gcc_assert (rclass == NO_REGS);
390
391 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392 gcc_assert (*scratch_constraint == '=');
393 scratch_constraint++;
394 if (*scratch_constraint == '&')
395 scratch_constraint++;
396 scratch_class = (reg_class_for_constraint
397 (lookup_constraint (scratch_constraint)));
398
399 rclass = scratch_class;
400 mode = insn_data[(int) icode].operand[2].mode;
401 }
402
403 /* This case isn't valid, so fail. Reload is allowed to use the same
404 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405 in the case of a secondary register, we actually need two different
406 registers for correct code. We fail here to prevent the possibility of
407 silently generating incorrect code later.
408
409 The convention is that secondary input reloads are valid only if the
410 secondary_class is different from class. If you have such a case, you
411 cannot use secondary reloads; you must work around the problem some
412 other way.
413
414 Allow this when a reload_in/out pattern is being used. I.e. assume
415 that the generated code handles this case. */
416
417 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 || t_icode != CODE_FOR_nothing);
419
420 /* See if we can reuse an existing secondary reload. */
421 for (s_reload = 0; s_reload < n_reloads; s_reload++)
422 if (rld[s_reload].secondary_p
423 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 && ((in_p && rld[s_reload].inmode == mode)
426 || (! in_p && rld[s_reload].outmode == mode))
427 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 && (small_register_class_p (rclass)
432 || targetm.small_register_classes_for_mode_p (VOIDmode))
433 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 opnum, rld[s_reload].opnum))
435 {
436 if (in_p)
437 rld[s_reload].inmode = mode;
438 if (! in_p)
439 rld[s_reload].outmode = mode;
440
441 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 rld[s_reload].rclass = rclass;
443
444 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 rld[s_reload].optional &= optional;
446 rld[s_reload].secondary_p = 1;
447 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 rld[s_reload].when_needed = RELOAD_OTHER;
450
451 break;
452 }
453
454 if (s_reload == n_reloads)
455 {
456 /* If we need a memory location to copy between the two reload regs,
457 set it up now. Note that we do the input case before making
458 the reload and the output case after. This is due to the
459 way reloads are output. */
460
461 if (in_p && icode == CODE_FOR_nothing
462 && targetm.secondary_memory_needed (mode, rclass, reload_class))
463 {
464 get_secondary_mem (x, reload_mode, opnum, type);
465
466 /* We may have just added new reloads. Make sure we add
467 the new reload at the end. */
468 s_reload = n_reloads;
469 }
470
471 /* We need to make a new secondary reload for this register class. */
472 rld[s_reload].in = rld[s_reload].out = 0;
473 rld[s_reload].rclass = rclass;
474
475 rld[s_reload].inmode = in_p ? mode : VOIDmode;
476 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477 rld[s_reload].reg_rtx = 0;
478 rld[s_reload].optional = optional;
479 rld[s_reload].inc = 0;
480 /* Maybe we could combine these, but it seems too tricky. */
481 rld[s_reload].nocombine = 1;
482 rld[s_reload].in_reg = 0;
483 rld[s_reload].out_reg = 0;
484 rld[s_reload].opnum = opnum;
485 rld[s_reload].when_needed = secondary_type;
486 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489 rld[s_reload].secondary_out_icode
490 = ! in_p ? t_icode : CODE_FOR_nothing;
491 rld[s_reload].secondary_p = 1;
492
493 n_reloads++;
494
495 if (! in_p && icode == CODE_FOR_nothing
496 && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 get_secondary_mem (x, mode, opnum, type);
498 }
499
500 *picode = icode;
501 return s_reload;
502 }
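
/* Illustrative sketch of the target side (not part of this file): a
   hypothetical TARGET_SECONDARY_RELOAD hook of the kind whose answers
   drive push_secondary_reload above.  FP_REGS and CODE_FOR_reload_outsi
   are made-up names for the example.  */
#if 0
static reg_class_t
example_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  /* Loading a MEM into FP_REGS needs an integer register as an
     intermediate, so push_secondary_reload records an extra reload
     in GENERAL_REGS.  */
  if (in_p && rclass == FP_REGS && MEM_P (x))
    return GENERAL_REGS;

  /* Storing from FP_REGS to a MEM is handled by a special pattern that
     only needs a scratch register; report its icode instead.  */
  if (!in_p && rclass == FP_REGS && MEM_P (x))
    sri->icode = CODE_FOR_reload_outsi;

  return NO_REGS;
}
#endif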
503
504 /* If a secondary reload is needed, return its class. If both an intermediate
505 register and a scratch register are needed, we return the class of the
506 intermediate register. */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 rtx x)
510 {
511 enum insn_code icode;
512 secondary_reload_info sri;
513
514 sri.icode = CODE_FOR_nothing;
515 sri.prev_sri = NULL;
516 rclass
517 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518 icode = (enum insn_code) sri.icode;
519
520 /* If there are no secondary reloads at all, we return NO_REGS.
521 If an intermediate register is needed, we return its class. */
522 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523 return rclass;
524
525 /* No intermediate register is needed, but we have a special reload
526 pattern, which we assume for now needs a scratch register. */
527 return scratch_reload_class (icode);
528 }
529
530 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
531 three operands, verify that operand 2 is an output operand, and return
532 its register class.
533 ??? We'd like to be able to handle any pattern with at least 2 operands,
534 for zero or more scratch registers, but that needs more infrastructure. */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
537 {
538 const char *scratch_constraint;
539 enum reg_class rclass;
540
541 gcc_assert (insn_data[(int) icode].n_operands == 3);
542 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543 gcc_assert (*scratch_constraint == '=');
544 scratch_constraint++;
545 if (*scratch_constraint == '&')
546 scratch_constraint++;
547 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548 gcc_assert (rclass != NO_REGS);
549 return rclass;
550 }
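
/* Example (illustrative; the pattern is hypothetical): for a reload
   pattern whose operand constraints are
       0: "=m"    (the destination)
       1: "r"     (the value being stored)
       2: "=&r"   (the scratch)
   scratch_reload_class skips the '=' and '&' of operand 2 and returns
   the class behind constraint "r", i.e. GENERAL_REGS on most targets.  */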
551 \f
552 /* Return a memory location that will be used to copy X in mode MODE.
553 If we haven't already made a location for this mode in this insn,
554 call find_reloads_address on the location being returned. */
555
556 rtx
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 int opnum, enum reload_type type)
559 {
560 rtx loc;
561 int mem_valid;
562
563 /* By default, if MODE is narrower than a word, widen it to a word.
564 This is required because most machines that require these memory
565 locations do not support short loads and stores from all registers
566 (e.g., FP registers). */
567
568 mode = targetm.secondary_memory_needed_mode (mode);
569
570 /* If we already have made a MEM for this operand in MODE, return it. */
571 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572 return secondary_memlocs_elim[(int) mode][opnum];
573
574 /* If this is the first time we've tried to get a MEM for this mode,
575 allocate a new one. `something_changed' in reload will get set
576 by noticing that the frame size has changed. */
577
578 if (secondary_memlocs[(int) mode] == 0)
579 {
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583 secondary_memlocs[(int) mode]
584 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
586 }
587
588 /* Get a version of the address doing any eliminations needed. If that
589 didn't give us a new MEM, make a new one if it isn't valid. */
590
591 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 MEM_ADDR_SPACE (loc));
594
595 if (! mem_valid && loc == secondary_memlocs[(int) mode])
596 loc = copy_rtx (loc);
597
598 /* The only time the call below will do anything is if the stack
599 offset is too large. In that case IND_LEVELS doesn't matter, so we
600 can just pass a zero. Adjust the type to be the address of the
601 corresponding object. If the address was valid, save the eliminated
602 address. If it wasn't valid, we need to make a reload each time, so
603 don't save it. */
604
605 if (! mem_valid)
606 {
607 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 : RELOAD_OTHER);
610
611 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 opnum, type, 0, 0);
613 }
614
615 secondary_memlocs_elim[(int) mode][opnum] = loc;
616 if (secondary_memlocs_elim_used <= (int)mode)
617 secondary_memlocs_elim_used = (int)mode + 1;
618 return loc;
619 }
620
621 /* Clear any secondary memory locations we've made. */
622
623 void
624 clear_secondary_mem (void)
625 {
626 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
627 }
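
/* Illustrative use of the two functions above (FP_REGS is a made-up class
   name): when the target reports that copying an SImode value between two
   classes must go through memory, a reload fetches (and, the first time,
   creates) the shared stack slot for operand OPNUM with

     if (targetm.secondary_memory_needed (SImode, FP_REGS, GENERAL_REGS))
       loc = get_secondary_mem (x, SImode, opnum, RELOAD_FOR_INPUT);

   and the whole table can be discarded again with clear_secondary_mem.  */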
628 \f
629
630 /* Find the largest class which has at least one register valid in
631 mode INNER, and which for every such register, that register number
632 plus N is also valid in OUTER (if in range) and is cheap to move
633 into DEST_REGNO. Such a class must exist. */
634
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 machine_mode inner ATTRIBUTE_UNUSED, int n,
638 unsigned int dest_regno ATTRIBUTE_UNUSED)
639 {
640 int best_cost = -1;
641 int rclass;
642 int regno;
643 enum reg_class best_class = NO_REGS;
644 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645 unsigned int best_size = 0;
646 int cost;
647
648 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
649 {
650 int bad = 0;
651 int good = 0;
652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
654 {
655 if (targetm.hard_regno_mode_ok (regno, inner))
656 {
657 good = 1;
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 && !targetm.hard_regno_mode_ok (regno + n, outer))
660 bad = 1;
661 }
662 }
663
664 if (bad || !good)
665 continue;
666 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
667
668 if ((reg_class_size[rclass] > best_size
669 && (best_cost < 0 || best_cost >= cost))
670 || best_cost > cost)
671 {
672 best_class = (enum reg_class) rclass;
673 best_size = reg_class_size[rclass];
674 best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 dest_class);
676 }
677 }
678
679 gcc_assert (best_size != 0);
680
681 return best_class;
682 }
683
684 /* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
688
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
693 {
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
700
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
702 {
703 unsigned int computed_rclass_size = 0;
704
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
706 {
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
710 }
711
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
713
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
717 {
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
722 }
723 }
724
725 gcc_assert (best_size != 0);
726
727 #ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730 return best_class;
731 }
732 \f
733 /* Return the number of a previously made reload that can be combined with
734 a new one, or n_reloads if none of the existing reloads can be used.
735 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736 push_reload, they determine the kind of the new reload that we try to
737 combine. P_IN points to the corresponding value of IN, which can be
738 modified by this function.
739 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
740
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 enum reload_type type, int opnum, int dont_share)
744 {
745 rtx in = *p_in;
746 int i;
747 /* We can't merge two reloads if the output of either one is
748 earlyclobbered. */
749
750 if (earlyclobber_operand_p (out))
751 return n_reloads;
752
753 /* We can use an existing reload if the class is right
754 and at least one of IN and OUT is a match
755 and the other is at worst neutral.
756 (A zero compared against anything is neutral.)
757
758 For targets with small register classes, don't use existing reloads
759 unless they are for the same thing since that can cause us to need
760 more reload registers than we otherwise would. */
761
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our class. */
766 && (rld[i].reg_rtx == 0
767 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 true_regnum (rld[i].reg_rtx)))
769 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 || (out != 0 && MATCHES (rld[i].out, out)
772 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 && (small_register_class_p (rclass)
775 || targetm.small_register_classes_for_mode_p (VOIDmode))
776 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777 return i;
778
779 /* Reloading a plain reg for input can match a reload to postincrement
780 that reg, since the postincrement's value is the right value.
781 Likewise, it can match a preincrement reload, since we regard
782 the preincrementation as happening before any ref in this insn
783 to that register. */
784 for (i = 0; i < n_reloads; i++)
785 if ((reg_class_subset_p (rclass, rld[i].rclass)
786 || reg_class_subset_p (rld[i].rclass, rclass))
787 /* If the existing reload has a register, it must fit our
788 class. */
789 && (rld[i].reg_rtx == 0
790 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 true_regnum (rld[i].reg_rtx)))
792 && out == 0 && rld[i].out == 0 && rld[i].in != 0
793 && ((REG_P (in)
794 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 && MATCHES (XEXP (rld[i].in, 0), in))
796 || (REG_P (rld[i].in)
797 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 && MATCHES (XEXP (in, 0), rld[i].in)))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed,
803 opnum, rld[i].opnum))
804 {
805 /* Make sure reload_in ultimately has the increment,
806 not the plain register. */
807 if (REG_P (in))
808 *p_in = rld[i].in;
809 return i;
810 }
811 return n_reloads;
812 }
813
814 /* Return true if:
815
816 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817 of a multiword value; and
818
819 (b) the number of *words* in REG does not match the number of *registers*
820 in REG. */
821
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
824 {
825 machine_mode inner_mode = GET_MODE (reg);
826 return (GET_MODE_SIZE (outer_mode) <= UNITS_PER_WORD
827 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
828 && GET_MODE_SIZE (inner_mode) / UNITS_PER_WORD != REG_NREGS (reg));
829 }
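
/* For example (illustrative; assumes a 32-bit target whose floating-point
   registers are 64 bits wide): for (subreg:SI (reg:DF f1) 0), SImode fits
   in one word, DFmode spans two words, but REG_NREGS of the DFmode hard
   register is 1, so complex_word_subreg_p returns true.  */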
830
831 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
832 expression. MODE is the mode that X will be used in. OUTPUT is true if
833 the function is invoked for the output part of an enclosing reload. */
834
835 static bool
836 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
837 {
838 rtx inner;
839
840 /* Only SUBREGs are problematical. */
841 if (GET_CODE (x) != SUBREG)
842 return false;
843
844 inner = SUBREG_REG (x);
845
846 /* If INNER is a constant or PLUS, then INNER will need reloading. */
847 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
848 return true;
849
850 /* If INNER is not a hard register, then INNER will not need reloading. */
851 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
852 return false;
853
854 /* If INNER is not ok for MODE, then INNER will need reloading. */
855 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
856 return true;
857
858 /* If this is for an output, and the outer part is a word or smaller,
859 INNER is larger than a word and the number of registers in INNER is
860 not the same as the number of words in INNER, then INNER will need
861 reloading (with an in-out reload). */
862 return output && complex_word_subreg_p (mode, inner);
863 }
864
865 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
866 requiring an extra reload register. The caller has already found that
867 IN contains some reference to REGNO, so check that we can produce the
868 new value in a single step. E.g. if we have
869 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
870 instruction that adds one to a register, this should succeed.
871 However, if we have something like
872 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
873 needs to be loaded into a register first, we need a separate reload
874 register.
875 Such PLUS reloads are generated by find_reloads_address_part.
876 The out-of-range PLUS expressions are usually introduced in the instruction
877 patterns by register elimination and by replacing pseudos that have no
878 home with their function-invariant equivalences. */
879 static int
880 can_reload_into (rtx in, int regno, machine_mode mode)
881 {
882 rtx dst;
883 rtx_insn *test_insn;
884 int r = 0;
885 struct recog_data_d save_recog_data;
886
887 /* For matching constraints, we often get notional input reloads where
888 we want to use the original register as the reload register. I.e.
889 technically this is a non-optional input-output reload, but IN is
890 already a valid register, and has been chosen as the reload register.
891 Speed this up, since it trivially works. */
892 if (REG_P (in))
893 return 1;
894
895 /* To test MEMs properly, we'd have to take into account all the reloads
896 that are already scheduled, which can become quite complicated.
897 And since we've already handled address reloads for this MEM, it
898 should always succeed anyway. */
899 if (MEM_P (in))
900 return 1;
901
902 /* If we can make a simple SET insn that does the job, everything should
903 be fine. */
904 dst = gen_rtx_REG (mode, regno);
905 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
906 save_recog_data = recog_data;
907 if (recog_memoized (test_insn) >= 0)
908 {
909 extract_insn (test_insn);
910 r = constrain_operands (1, get_enabled_alternatives (test_insn));
911 }
912 recog_data = save_recog_data;
913 return r;
914 }
915
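/* Illustrative sketch of a call to push_reload (documented below), as it
   might be made for an input operand that must end up in a general
   register; the operand number, class and modes are assumptions for the
   example:

     push_reload (recog_data.operand[1], NULL_RTX,
		  &recog_data.operand[1], (rtx *) 0,
		  GENERAL_REGS, SImode, VOIDmode, 0, 0, 1,
		  RELOAD_FOR_INPUT);  */
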
916 /* Record one reload that needs to be performed.
917 IN is an rtx saying where the data are to be found before this instruction.
918 OUT says where they must be stored after the instruction.
919 (IN is zero for data not read, and OUT is zero for data not written.)
920 INLOC and OUTLOC point to the places in the instructions where
921 IN and OUT were found.
922 If IN and OUT are both nonzero, it means the same register must be used
923 to reload both IN and OUT.
924
925 RCLASS is a register class required for the reloaded data.
926 INMODE is the machine mode that the instruction requires
927 for the reg that replaces IN and OUTMODE is likewise for OUT.
928
929 If IN is zero, then OUT's location and mode should be passed as
930 INLOC and INMODE.
931
932 STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
933
934 OPTIONAL nonzero means this reload does not need to be performed:
935 it can be discarded if that is more convenient.
936
937 OPNUM and TYPE say what the purpose of this reload is.
938
939 The return value is the reload-number for this reload.
940
941 If both IN and OUT are nonzero, in some rare cases we might
942 want to make two separate reloads. (Actually we never do this now.)
943 Therefore, the reload-number for OUT is stored in
944 output_reloadnum when we return; the return value applies to IN.
945 Usually (presently always), when IN and OUT are nonzero,
946 the two reload-numbers are equal, but the caller should be careful to
947 distinguish them. */
948
949 int
950 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
951 enum reg_class rclass, machine_mode inmode,
952 machine_mode outmode, int strict_low, int optional,
953 int opnum, enum reload_type type)
954 {
955 int i;
956 int dont_share = 0;
957 int dont_remove_subreg = 0;
958 #ifdef LIMIT_RELOAD_CLASS
959 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
960 #endif
961 int secondary_in_reload = -1, secondary_out_reload = -1;
962 enum insn_code secondary_in_icode = CODE_FOR_nothing;
963 enum insn_code secondary_out_icode = CODE_FOR_nothing;
964 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
965 subreg_in_class = NO_REGS;
966
967 /* INMODE and/or OUTMODE could be VOIDmode if no mode
968 has been specified for the operand. In that case,
969 use the operand's mode as the mode to reload. */
970 if (inmode == VOIDmode && in != 0)
971 inmode = GET_MODE (in);
972 if (outmode == VOIDmode && out != 0)
973 outmode = GET_MODE (out);
974
975 /* If find_reloads and friends have so far failed to replace a pseudo
976 with its reg_equiv_constant, something went wrong
977 beforehand.
978 Note that the replacement can't simply be done here if we missed it
979 earlier, since the constant might need to be pushed into the literal
980 pool and the resulting memref would probably need further
981 reloading. */
982 if (in != 0 && REG_P (in))
983 {
984 int regno = REGNO (in);
985
986 gcc_assert (regno < FIRST_PSEUDO_REGISTER
987 || reg_renumber[regno] >= 0
988 || reg_equiv_constant (regno) == NULL_RTX);
989 }
990
991 /* reg_equiv_constant only contains constants which are obviously
992 not appropriate as a destination. So if we would need to replace
993 the destination pseudo with a constant, we are in real
994 trouble. */
995 if (out != 0 && REG_P (out))
996 {
997 int regno = REGNO (out);
998
999 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1000 || reg_renumber[regno] >= 0
1001 || reg_equiv_constant (regno) == NULL_RTX);
1002 }
1003
1004 /* If we have a read-write operand with an address side-effect,
1005 change either IN or OUT so the side-effect happens only once. */
1006 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1007 switch (GET_CODE (XEXP (in, 0)))
1008 {
1009 case POST_INC: case POST_DEC: case POST_MODIFY:
1010 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1011 break;
1012
1013 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1014 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1015 break;
1016
1017 default:
1018 break;
1019 }
1020
1021 /* If we are reloading a (SUBREG constant ...), really reload just the
1022 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1023 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1024 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1025 register is a pseudo, also reload the inside expression.
1026 For machines that extend byte loads, do this for any SUBREG of a pseudo
1027 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1028 M2 is an integral mode that gets extended when loaded.
1029 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1030 where either M1 is not valid for R or M2 is wider than a word but we
1031 only need one register to store an M2-sized quantity in R.
1032 (However, if OUT is nonzero, we need to reload the reg *and*
1033 the subreg, so do nothing here, and let following statement handle it.)
1034
1035 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1036 we can't handle it here because CONST_INT does not indicate a mode.
1037
1038 Similarly, we must reload the inside expression if we have a
1039 STRICT_LOW_PART (presumably, in == out in this case).
1040
1041 Also reload the inner expression if it does not require a secondary
1042 reload but the SUBREG does.
1043
1044 Finally, reload the inner expression if it is a register that is in
1045 the class whose registers cannot be referenced in a different size
1046 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1047 cannot reload just the inside since we might end up with the wrong
1048 register class. But if it is inside a STRICT_LOW_PART, we have
1049 no choice, so we hope we do get the right register class there. */
1050
1051 scalar_int_mode inner_mode;
1052 if (in != 0 && GET_CODE (in) == SUBREG
1053 && (subreg_lowpart_p (in) || strict_low)
1054 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1055 inmode, rclass)
1056 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1057 && (CONSTANT_P (SUBREG_REG (in))
1058 || GET_CODE (SUBREG_REG (in)) == PLUS
1059 || strict_low
1060 || (((REG_P (SUBREG_REG (in))
1061 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1062 || MEM_P (SUBREG_REG (in)))
1063 && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1064 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1065 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
1066 &inner_mode)
1067 && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1068 && paradoxical_subreg_p (inmode, inner_mode)
1069 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1070 || (WORD_REGISTER_OPERATIONS
1071 && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
1072 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1073 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1074 / UNITS_PER_WORD)))))
1075 || (REG_P (SUBREG_REG (in))
1076 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1077 /* The case where out is nonzero
1078 is handled differently in the following statement. */
1079 && (out == 0 || subreg_lowpart_p (in))
1080 && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1081 || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
1082 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1083 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1084 SUBREG_REG (in))
1085 == NO_REGS))
1086 || (REG_P (SUBREG_REG (in))
1087 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1088 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1089 GET_MODE (SUBREG_REG (in)), inmode))))
1090 {
1091 #ifdef LIMIT_RELOAD_CLASS
1092 in_subreg_loc = inloc;
1093 #endif
1094 inloc = &SUBREG_REG (in);
1095 in = *inloc;
1096
1097 if (!WORD_REGISTER_OPERATIONS
1098 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1099 && MEM_P (in))
1100 /* This is supposed to happen only for paradoxical subregs made by
1101 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1102 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1103
1104 inmode = GET_MODE (in);
1105 }
1106
1107 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1108 where M1 is not valid for R if it was not handled by the code above.
1109
1110 Similar issue for (SUBREG constant ...) if it was not handled by the
1111 code above. This can happen if SUBREG_BYTE != 0.
1112
1113 However, we must reload the inner reg *as well as* the subreg in
1114 that case. */
1115
1116 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1117 {
1118 if (REG_P (SUBREG_REG (in)))
1119 subreg_in_class
1120 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1121 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1122 GET_MODE (SUBREG_REG (in)),
1123 SUBREG_BYTE (in),
1124 GET_MODE (in)),
1125 REGNO (SUBREG_REG (in)));
1126 else if (CONSTANT_P (SUBREG_REG (in))
1127 || GET_CODE (SUBREG_REG (in)) == PLUS)
1128 subreg_in_class = find_valid_class_1 (inmode,
1129 GET_MODE (SUBREG_REG (in)),
1130 rclass);
1131
1132 /* This relies on the fact that emit_reload_insns outputs the
1133 instructions for input reloads of type RELOAD_OTHER in the same
1134 order as the reloads. Thus if the outer reload is also of type
1135 RELOAD_OTHER, we are guaranteed that this inner reload will be
1136 output before the outer reload. */
1137 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1138 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1139 dont_remove_subreg = 1;
1140 }
1141
1142 /* Similarly for paradoxical and problematical SUBREGs on the output.
1143 Note that there is no reason we need to worry about the previous value
1144 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1145 entitled to clobber it all (except in the case of a word mode subreg
1146 or of a STRICT_LOW_PART; in the latter case the constraint should
1147 label it input-output.) */
1148 if (out != 0 && GET_CODE (out) == SUBREG
1149 && (subreg_lowpart_p (out) || strict_low)
1150 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1151 outmode, rclass)
1152 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1153 && (CONSTANT_P (SUBREG_REG (out))
1154 || strict_low
1155 || (((REG_P (SUBREG_REG (out))
1156 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1157 || MEM_P (SUBREG_REG (out)))
1158 && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1159 || (WORD_REGISTER_OPERATIONS
1160 && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1161 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1162 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1163 / UNITS_PER_WORD)))))
1164 || (REG_P (SUBREG_REG (out))
1165 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1166 /* The case of a word mode subreg
1167 is handled differently in the following statement. */
1168 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1169 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1170 > UNITS_PER_WORD))
1171 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1172 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1173 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1174 SUBREG_REG (out))
1175 == NO_REGS))
1176 || (REG_P (SUBREG_REG (out))
1177 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1178 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1179 GET_MODE (SUBREG_REG (out)),
1180 outmode))))
1181 {
1182 #ifdef LIMIT_RELOAD_CLASS
1183 out_subreg_loc = outloc;
1184 #endif
1185 outloc = &SUBREG_REG (out);
1186 out = *outloc;
1187 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1188 || GET_MODE_SIZE (GET_MODE (out))
1189 <= GET_MODE_SIZE (outmode));
1190 outmode = GET_MODE (out);
1191 }
1192
1193 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1194 where either M1 is not valid for R or M2 is wider than a word but we
1195 only need one register to store an M2-sized quantity in R.
1196
1197 However, we must reload the inner reg *as well as* the subreg in
1198 that case and the inner reg is an in-out reload. */
1199
1200 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1201 {
1202 enum reg_class in_out_class
1203 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1204 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1205 GET_MODE (SUBREG_REG (out)),
1206 SUBREG_BYTE (out),
1207 GET_MODE (out)),
1208 REGNO (SUBREG_REG (out)));
1209
1210 /* This relies on the fact that emit_reload_insns outputs the
1211 instructions for output reloads of type RELOAD_OTHER in reverse
1212 order of the reloads. Thus if the outer reload is also of type
1213 RELOAD_OTHER, we are guaranteed that this inner reload will be
1214 output after the outer reload. */
1215 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1216 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1217 0, 0, opnum, RELOAD_OTHER);
1218 dont_remove_subreg = 1;
1219 }
1220
1221 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1222 if (in != 0 && out != 0 && MEM_P (out)
1223 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1224 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1225 dont_share = 1;
1226
1227 /* If IN is a SUBREG of a hard register, make a new REG. This
1228 simplifies some of the cases below. */
1229
1230 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1231 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1232 && ! dont_remove_subreg)
1233 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1234
1235 /* Similarly for OUT. */
1236 if (out != 0 && GET_CODE (out) == SUBREG
1237 && REG_P (SUBREG_REG (out))
1238 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1239 && ! dont_remove_subreg)
1240 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1241
1242 /* Narrow down the class of register wanted if that is
1243 desirable on this machine for efficiency. */
1244 {
1245 reg_class_t preferred_class = rclass;
1246
1247 if (in != 0)
1248 preferred_class = targetm.preferred_reload_class (in, rclass);
1249
1250 /* Output reloads may need analogous treatment, different in detail. */
1251 if (out != 0)
1252 preferred_class
1253 = targetm.preferred_output_reload_class (out, preferred_class);
1254
1255 /* Discard what the target said if we cannot do it. */
1256 if (preferred_class != NO_REGS
1257 || (optional && type == RELOAD_FOR_OUTPUT))
1258 rclass = (enum reg_class) preferred_class;
1259 }
1260
1261 /* Make sure we use a class that can handle the actual pseudo
1262 inside any subreg. For example, on the 386, QImode regs
1263 can appear within SImode subregs. Although GENERAL_REGS
1264 can handle SImode, QImode needs a smaller class. */
1265 #ifdef LIMIT_RELOAD_CLASS
1266 if (in_subreg_loc)
1267 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1268 else if (in != 0 && GET_CODE (in) == SUBREG)
1269 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1270
1271 if (out_subreg_loc)
1272 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1273 if (out != 0 && GET_CODE (out) == SUBREG)
1274 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1275 #endif
1276
1277 /* Verify that this class is at least possible for the mode that
1278 is specified. */
1279 if (this_insn_is_asm)
1280 {
1281 machine_mode mode;
1282 if (paradoxical_subreg_p (inmode, outmode))
1283 mode = inmode;
1284 else
1285 mode = outmode;
1286 if (mode == VOIDmode)
1287 {
1288 error_for_asm (this_insn, "cannot reload integer constant "
1289 "operand in %<asm%>");
1290 mode = word_mode;
1291 if (in != 0)
1292 inmode = word_mode;
1293 if (out != 0)
1294 outmode = word_mode;
1295 }
1296 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1297 if (targetm.hard_regno_mode_ok (i, mode)
1298 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1299 break;
1300 if (i == FIRST_PSEUDO_REGISTER)
1301 {
1302 error_for_asm (this_insn, "impossible register constraint "
1303 "in %<asm%>");
1304 /* Avoid further trouble with this insn. */
1305 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1306 /* We used to continue here setting class to ALL_REGS, but it triggers
1307 sanity check on i386 for:
1308 void foo(long double d)
1309 {
1310 asm("" :: "a" (d));
1311 }
1312 Returning zero here ought to be safe as we take care in
1313 find_reloads to not process the reloads when instruction was
1314 replaced by USE. */
1315
1316 return 0;
1317 }
1318 }
1319
1320 /* Optional output reloads are always OK even if we have no register class,
1321 since the function of these reloads is only to have spill_reg_store etc.
1322 set, so that the storing insn can be deleted later. */
1323 gcc_assert (rclass != NO_REGS
1324 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1325
1326 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1327
1328 if (i == n_reloads)
1329 {
1330 /* See if we need a secondary reload register to move between CLASS
1331 and IN or CLASS and OUT. Get the icode and push any required reloads
1332 needed for each of them if so. */
1333
1334 if (in != 0)
1335 secondary_in_reload
1336 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1337 &secondary_in_icode, NULL);
1338 if (out != 0 && GET_CODE (out) != SCRATCH)
1339 secondary_out_reload
1340 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1341 type, &secondary_out_icode, NULL);
1342
1343 /* We found no existing reload suitable for re-use.
1344 So add an additional reload. */
1345
1346 if (subreg_in_class == NO_REGS
1347 && in != 0
1348 && (REG_P (in)
1349 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1350 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1351 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1352 /* If a memory location is needed for the copy, make one. */
1353 if (subreg_in_class != NO_REGS
1354 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1355 get_secondary_mem (in, inmode, opnum, type);
1356
1357 i = n_reloads;
1358 rld[i].in = in;
1359 rld[i].out = out;
1360 rld[i].rclass = rclass;
1361 rld[i].inmode = inmode;
1362 rld[i].outmode = outmode;
1363 rld[i].reg_rtx = 0;
1364 rld[i].optional = optional;
1365 rld[i].inc = 0;
1366 rld[i].nocombine = 0;
1367 rld[i].in_reg = inloc ? *inloc : 0;
1368 rld[i].out_reg = outloc ? *outloc : 0;
1369 rld[i].opnum = opnum;
1370 rld[i].when_needed = type;
1371 rld[i].secondary_in_reload = secondary_in_reload;
1372 rld[i].secondary_out_reload = secondary_out_reload;
1373 rld[i].secondary_in_icode = secondary_in_icode;
1374 rld[i].secondary_out_icode = secondary_out_icode;
1375 rld[i].secondary_p = 0;
1376
1377 n_reloads++;
1378
1379 if (out != 0
1380 && (REG_P (out)
1381 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1382 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1383 && (targetm.secondary_memory_needed
1384 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1385 get_secondary_mem (out, outmode, opnum, type);
1386 }
1387 else
1388 {
1389 /* We are reusing an existing reload,
1390 but we may have additional information for it.
1391 For example, we may now have both IN and OUT
1392 while the old one may have just one of them. */
1393
1394 /* The modes can be different. If they are, we want to reload in
1395 the larger mode, so that the value is valid for both modes. */
1396 if (inmode != VOIDmode
1397 && partial_subreg_p (rld[i].inmode, inmode))
1398 rld[i].inmode = inmode;
1399 if (outmode != VOIDmode
1400 && partial_subreg_p (rld[i].outmode, outmode))
1401 rld[i].outmode = outmode;
1402 if (in != 0)
1403 {
1404 rtx in_reg = inloc ? *inloc : 0;
1405 /* If we merge reloads for two distinct rtl expressions that
1406 are identical in content, there might be duplicate address
1407 reloads. Remove the extra set now, so that if we later find
1408 that we can inherit this reload, we can get rid of the
1409 address reloads altogether.
1410
1411 Do not do this if both reloads are optional since the result
1412 would be an optional reload which could potentially leave
1413 unresolved address replacements.
1414
1415 It is not sufficient to call transfer_replacements since
1416 choose_reload_regs will remove the replacements for address
1417 reloads of inherited reloads which results in the same
1418 problem. */
1419 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1420 && ! (rld[i].optional && optional))
1421 {
1422 /* We must keep the address reload with the lower operand
1423 number alive. */
1424 if (opnum > rld[i].opnum)
1425 {
1426 remove_address_replacements (in);
1427 in = rld[i].in;
1428 in_reg = rld[i].in_reg;
1429 }
1430 else
1431 remove_address_replacements (rld[i].in);
1432 }
1433 /* When emitting reloads we don't look only at the in- and
1434 outmode, but also directly at the operands (in and out).
1435 So we can't simply overwrite them with whatever we have found
1436 for this (to-be-merged) reload, we have to "merge" that too.
1437 Reusing another reload already verified that we deal with the
1438 same operands, just possibly in different modes. So we
1439 overwrite the operands only when the new mode is larger.
1440 See also PR33613. */
1441 if (!rld[i].in
1442 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1443 rld[i].in = in;
1444 if (!rld[i].in_reg
1445 || (in_reg
1446 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1447 GET_MODE (in_reg))))
1448 rld[i].in_reg = in_reg;
1449 }
1450 if (out != 0)
1451 {
1452 if (!rld[i].out
1453 || (out
1454 && partial_subreg_p (GET_MODE (rld[i].out),
1455 GET_MODE (out))))
1456 rld[i].out = out;
1457 if (outloc
1458 && (!rld[i].out_reg
1459 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1460 GET_MODE (*outloc))))
1461 rld[i].out_reg = *outloc;
1462 }
1463 if (reg_class_subset_p (rclass, rld[i].rclass))
1464 rld[i].rclass = rclass;
1465 rld[i].optional &= optional;
1466 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1467 opnum, rld[i].opnum))
1468 rld[i].when_needed = RELOAD_OTHER;
1469 rld[i].opnum = MIN (rld[i].opnum, opnum);
1470 }
1471
1472 /* If the ostensible rtx being reloaded differs from the rtx found
1473 in the location to substitute, this reload is not safe to combine
1474 because we cannot reliably tell whether it appears in the insn. */
1475
1476 if (in != 0 && in != *inloc)
1477 rld[i].nocombine = 1;
1478
1479 #if 0
1480 /* This was replaced by changes in find_reloads_address_1 and the new
1481 function inc_for_reload, which go with a new meaning of reload_inc. */
1482
1483 /* If this is an IN/OUT reload in an insn that sets the CC,
1484 it must be for an autoincrement. It doesn't work to store
1485 the incremented value after the insn because that would clobber the CC.
1486 So we must do the increment of the value reloaded from,
1487 increment it, store it back, then decrement again. */
1488 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1489 {
1490 out = 0;
1491 rld[i].out = 0;
1492 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1493 /* If we did not find a nonzero amount-to-increment-by,
1494 that contradicts the belief that IN is being incremented
1495 in an address in this insn. */
1496 gcc_assert (rld[i].inc != 0);
1497 }
1498 #endif
1499
1500 /* If we will replace IN and OUT with the reload-reg,
1501 record where they are located so that substitution need
1502 not do a tree walk. */
1503
1504 if (replace_reloads)
1505 {
1506 if (inloc != 0)
1507 {
1508 struct replacement *r = &replacements[n_replacements++];
1509 r->what = i;
1510 r->where = inloc;
1511 r->mode = inmode;
1512 }
1513 if (outloc != 0 && outloc != inloc)
1514 {
1515 struct replacement *r = &replacements[n_replacements++];
1516 r->what = i;
1517 r->where = outloc;
1518 r->mode = outmode;
1519 }
1520 }
1521
1522 /* If this reload is just being introduced and it has both
1523 an incoming quantity and an outgoing quantity that are
1524 supposed to be made to match, see if either one of the two
1525 can serve as the place to reload into.
1526
1527 If one of them is acceptable, set rld[i].reg_rtx
1528 to that one. */
1529
1530 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1531 {
1532 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1533 inmode, outmode,
1534 rld[i].rclass, i,
1535 earlyclobber_operand_p (out));
1536
1537 /* If the outgoing register already contains the same value
1538 as the incoming one, we can dispense with loading it.
1539 The easiest way to tell the caller that is to give a phony
1540 value for the incoming operand (same as outgoing one). */
1541 if (rld[i].reg_rtx == out
1542 && (REG_P (in) || CONSTANT_P (in))
1543 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1544 static_reload_reg_p, i, inmode) != 0)
1545 rld[i].in = out;
1546 }
1547
1548 /* If this is an input reload and the operand contains a register that
1549 dies in this insn and is used nowhere else, see if it is the right class
1550 to be used for this reload. Use it if so. (This occurs most commonly
1551 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1552 this if it is also an output reload that mentions the register unless
1553 the output is a SUBREG that clobbers an entire register.
1554
1555 Note that the operand might be one of the spill regs, if it is a
1556 pseudo reg and we are in a block where spilling has not taken place.
1557 But if there is no spilling in this block, that is OK.
1558 An explicitly used hard reg cannot be a spill reg. */
1559
1560 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1561 {
1562 rtx note;
1563 int regno;
1564 machine_mode rel_mode = inmode;
1565
1566 if (out && partial_subreg_p (rel_mode, outmode))
1567 rel_mode = outmode;
1568
1569 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1570 if (REG_NOTE_KIND (note) == REG_DEAD
1571 && REG_P (XEXP (note, 0))
1572 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1573 && reg_mentioned_p (XEXP (note, 0), in)
1574 /* Check that a former pseudo is valid; see find_dummy_reload. */
1575 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1576 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1577 ORIGINAL_REGNO (XEXP (note, 0)))
1578 && REG_NREGS (XEXP (note, 0)) == 1))
1579 && ! refers_to_regno_for_reload_p (regno,
1580 end_hard_regno (rel_mode,
1581 regno),
1582 PATTERN (this_insn), inloc)
1583 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1584 /* If this is also an output reload, IN cannot be used as
1585 the reload register if it is set in this insn unless IN
1586 is also OUT. */
1587 && (out == 0 || in == out
1588 || ! hard_reg_set_here_p (regno,
1589 end_hard_regno (rel_mode, regno),
1590 PATTERN (this_insn)))
1591 /* ??? Why is this code so different from the previous?
1592 Is there any simple coherent way to describe the two together?
1593 What's going on here? */
1594 && (in != out
1595 || (GET_CODE (in) == SUBREG
1596 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1597 / UNITS_PER_WORD)
1598 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1599 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1600 /* Make sure the operand fits in the reg that dies. */
1601 && (GET_MODE_SIZE (rel_mode)
1602 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1603 && targetm.hard_regno_mode_ok (regno, inmode)
1604 && targetm.hard_regno_mode_ok (regno, outmode))
1605 {
1606 unsigned int offs;
1607 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1608 hard_regno_nregs (regno, outmode));
1609
1610 for (offs = 0; offs < nregs; offs++)
1611 if (fixed_regs[regno + offs]
1612 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1613 regno + offs))
1614 break;
1615
1616 if (offs == nregs
1617 && (! (refers_to_regno_for_reload_p
1618 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1619 || can_reload_into (in, regno, inmode)))
1620 {
1621 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1622 break;
1623 }
1624 }
1625 }
1626
1627 if (out)
1628 output_reloadnum = i;
1629
1630 return i;
1631 }
1632
1633 /* Record an additional place we must replace a value
1634 for which we have already recorded a reload.
1635 RELOADNUM is the value returned by push_reload
1636 when the reload was recorded.
1637 This is used in insn patterns that use match_dup. */
1638
1639 static void
1640 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1641 {
1642 if (replace_reloads)
1643 {
1644 struct replacement *r = &replacements[n_replacements++];
1645 r->what = reloadnum;
1646 r->where = loc;
1647 r->mode = mode;
1648 }
1649 }
1650
1651 /* Duplicate any replacement we have recorded to apply at
1652 location ORIG_LOC to also be performed at DUP_LOC.
1653 This is used in insn patterns that use match_dup. */
1654
1655 static void
1656 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1657 {
1658 int i, n = n_replacements;
1659
1660 for (i = 0; i < n; i++)
1661 {
1662 struct replacement *r = &replacements[i];
1663 if (r->where == orig_loc)
1664 push_replacement (dup_loc, r->what, r->mode);
1665 }
1666 }
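1667
/* Illustrative sketch, not part of the compiler and not compiled:
   conceptually, once all reloads and replacements for an insn have been
   recorded and reload registers chosen, subst_reloads walks this table
   and stores each reload's chosen register into the recorded location.
   The real code also adjusts the register rtx to the recorded mode;
   that detail is omitted here. */
#if 0
static void
subst_reloads_sketch (void)
{
  for (int k = 0; k < n_replacements; k++)
    {
      struct replacement *r = &replacements[k];

      /* Install the reload register chosen for reload R->WHAT at the
         location where the original value appeared. */
      *r->where = rld[r->what].reg_rtx;
    }
}
#endif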
1667 \f
1668 /* Transfer all replacements that used to be in reload FROM to be in
1669 reload TO. */
1670
1671 void
1672 transfer_replacements (int to, int from)
1673 {
1674 int i;
1675
1676 for (i = 0; i < n_replacements; i++)
1677 if (replacements[i].what == from)
1678 replacements[i].what = to;
1679 }
1680 \f
1681 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1682 or a subpart of it. If we have any replacements registered for IN_RTX,
1683 cancel the reloads that were supposed to load them.
1684 Return nonzero if we canceled any reloads. */
1685 int
1686 remove_address_replacements (rtx in_rtx)
1687 {
1688 int i, j;
1689 char reload_flags[MAX_RELOADS];
1690 int something_changed = 0;
1691
1692 memset (reload_flags, 0, sizeof reload_flags);
1693 for (i = 0, j = 0; i < n_replacements; i++)
1694 {
1695 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1696 reload_flags[replacements[i].what] |= 1;
1697 else
1698 {
1699 replacements[j++] = replacements[i];
1700 reload_flags[replacements[i].what] |= 2;
1701 }
1702 }
1703 /* Note that the following store must be done before the recursive calls. */
1704 n_replacements = j;
1705
1706 for (i = n_reloads - 1; i >= 0; i--)
1707 {
1708 if (reload_flags[i] == 1)
1709 {
1710 deallocate_reload_reg (i);
1711 remove_address_replacements (rld[i].in);
1712 rld[i].in = 0;
1713 something_changed = 1;
1714 }
1715 }
1716 return something_changed;
1717 }
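/* In other words, bit 0 of a reload's flags marks a replacement inside
   IN_RTX and bit 1 marks a replacement elsewhere. Only a reload whose
   flags end up as exactly 1 is purely an address reload for IN_RTX and
   gets cancelled; a reload with both bits set keeps its remaining
   replacements and stays live. */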
1718 \f
1719 /* If there is only one output reload, and it is not for an earlyclobber
1720 operand, try to combine it with a (logically unrelated) input reload
1721 to reduce the number of reload registers needed.
1722
1723 This is safe if the input reload does not appear in
1724 the value being output-reloaded, because this implies
1725 it is not needed any more once the original insn completes.
1726
1727 If that doesn't work, see if we can use any of the registers that
1728 die in this insn as a reload register. We can if it is of the right
1729 class and does not appear in the value being output-reloaded. */
1730
1731 static void
1732 combine_reloads (void)
1733 {
1734 int i, regno;
1735 int output_reload = -1;
1736 int secondary_out = -1;
1737 rtx note;
1738
1739 /* Find the output reload; return unless there is exactly one
1740 and that one is mandatory. */
1741
1742 for (i = 0; i < n_reloads; i++)
1743 if (rld[i].out != 0)
1744 {
1745 if (output_reload >= 0)
1746 return;
1747 output_reload = i;
1748 }
1749
1750 if (output_reload < 0 || rld[output_reload].optional)
1751 return;
1752
1753 /* An input-output reload isn't combinable. */
1754
1755 if (rld[output_reload].in != 0)
1756 return;
1757
1758 /* If this reload is for an earlyclobber operand, we can't do anything. */
1759 if (earlyclobber_operand_p (rld[output_reload].out))
1760 return;
1761
1762 /* If there is a reload for part of the address of this operand, we would
1763 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1764 its life to the point where doing this combine would not lower the
1765 number of spill registers needed. */
1766 for (i = 0; i < n_reloads; i++)
1767 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1768 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1769 && rld[i].opnum == rld[output_reload].opnum)
1770 return;
1771
1772 /* Check each input reload; can we combine it? */
1773
1774 for (i = 0; i < n_reloads; i++)
1775 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1776 /* Life span of this reload must not extend past main insn. */
1777 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1778 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1779 && rld[i].when_needed != RELOAD_OTHER
1780 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1781 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1782 [(int) rld[output_reload].outmode])
1783 && known_eq (rld[i].inc, 0)
1784 && rld[i].reg_rtx == 0
1785 /* Don't combine two reloads with different secondary
1786 memory locations. */
1787 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1788 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1789 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1790 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1791 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1792 ? (rld[i].rclass == rld[output_reload].rclass)
1793 : (reg_class_subset_p (rld[i].rclass,
1794 rld[output_reload].rclass)
1795 || reg_class_subset_p (rld[output_reload].rclass,
1796 rld[i].rclass)))
1797 && (MATCHES (rld[i].in, rld[output_reload].out)
1798 /* Args reversed because the first arg seems to be
1799 the one that we imagine being modified
1800 while the second is the one that might be affected. */
1801 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1802 rld[i].in)
1803 /* However, if the input is a register that appears inside
1804 the output, then we also can't share.
1805 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1806 If the same reload reg is used for both reg 69 and the
1807 result to be stored in memory, then that result
1808 will clobber the address of the memory ref. */
1809 && ! (REG_P (rld[i].in)
1810 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1811 rld[output_reload].out))))
1812 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1813 rld[i].when_needed != RELOAD_FOR_INPUT)
1814 && (reg_class_size[(int) rld[i].rclass]
1815 || targetm.small_register_classes_for_mode_p (VOIDmode))
1816 /* We will allow making things slightly worse by combining an
1817 input and an output, but no worse than that. */
1818 && (rld[i].when_needed == RELOAD_FOR_INPUT
1819 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1820 {
1821 int j;
1822
1823 /* We have found a reload to combine with! */
1824 rld[i].out = rld[output_reload].out;
1825 rld[i].out_reg = rld[output_reload].out_reg;
1826 rld[i].outmode = rld[output_reload].outmode;
1827 /* Mark the old output reload as inoperative. */
1828 rld[output_reload].out = 0;
1829 /* The combined reload is needed for the entire insn. */
1830 rld[i].when_needed = RELOAD_OTHER;
1831 /* If the output reload had a secondary reload, copy it. */
1832 if (rld[output_reload].secondary_out_reload != -1)
1833 {
1834 rld[i].secondary_out_reload
1835 = rld[output_reload].secondary_out_reload;
1836 rld[i].secondary_out_icode
1837 = rld[output_reload].secondary_out_icode;
1838 }
1839
1840 /* Copy any secondary MEM. */
1841 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1842 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1843 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1844 /* If required, minimize the register class. */
1845 if (reg_class_subset_p (rld[output_reload].rclass,
1846 rld[i].rclass))
1847 rld[i].rclass = rld[output_reload].rclass;
1848
1849 /* Transfer all replacements from the old reload to the combined. */
1850 for (j = 0; j < n_replacements; j++)
1851 if (replacements[j].what == output_reload)
1852 replacements[j].what = i;
1853
1854 return;
1855 }
1856
1857 /* If this insn has only one operand that is modified or written (assumed
1858 to be the first), it must be the one corresponding to this reload. It
1859 is safe to use anything that dies in this insn for that output provided
1860 that it does not occur in the output (we already know it isn't an
1861 earlyclobber). If this is an asm insn, give up. */
1862
1863 if (INSN_CODE (this_insn) == -1)
1864 return;
1865
1866 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1867 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1868 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1869 return;
1870
1871 /* See if some hard register that dies in this insn and is not used in
1872 the output is the right class. Only works if the register we pick
1873 up can fully hold our output reload. */
1874 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1875 if (REG_NOTE_KIND (note) == REG_DEAD
1876 && REG_P (XEXP (note, 0))
1877 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1878 rld[output_reload].out)
1879 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1880 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1881 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1882 regno)
1883 && (hard_regno_nregs (regno, rld[output_reload].outmode)
1884 <= REG_NREGS (XEXP (note, 0)))
1885 /* Ensure that a secondary or tertiary reload for this output
1886 won't want this register. */
1887 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1888 || (!(TEST_HARD_REG_BIT
1889 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1890 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1891 || !(TEST_HARD_REG_BIT
1892 (reg_class_contents[(int) rld[secondary_out].rclass],
1893 regno)))))
1894 && !fixed_regs[regno]
1895 /* Check that a former pseudo is valid; see find_dummy_reload. */
1896 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1897 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1898 ORIGINAL_REGNO (XEXP (note, 0)))
1899 && REG_NREGS (XEXP (note, 0)) == 1)))
1900 {
1901 rld[output_reload].reg_rtx
1902 = gen_rtx_REG (rld[output_reload].outmode, regno);
1903 return;
1904 }
1905 }
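/* Summing up: a successful combination above takes one of two forms.
   Either the output reload is merged into a suitable input reload,
   which then becomes RELOAD_OTHER and both loads its input before the
   insn and stores the output after it, or a hard register that dies in
   the insn is pre-assigned as the output reload register. Either way
   one reload register is saved. */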
1906 \f
1907 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1908 See if one of IN and OUT is a register that may be used;
1909 this is desirable since a spill-register won't be needed.
1910 If so, return the register rtx that proves acceptable.
1911
1912 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1913 RCLASS is the register class required for the reload.
1914
1915 If FOR_REAL is >= 0, it is the number of the reload,
1916 and in some cases when it can be discovered that OUT doesn't need
1917 to be computed, clear out rld[FOR_REAL].out.
1918
1919 If FOR_REAL is -1, this should not be done, because this call
1920 is just to see if a register can be found, not to find and install it.
1921
1922 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1923 puts an additional constraint on being able to use IN for OUT since
1924 IN must not appear elsewhere in the insn (it is assumed that IN itself
1925 is safe from the earlyclobber). */
1926
1927 static rtx
1928 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1929 machine_mode inmode, machine_mode outmode,
1930 reg_class_t rclass, int for_real, int earlyclobber)
1931 {
1932 rtx in = real_in;
1933 rtx out = real_out;
1934 int in_offset = 0;
1935 int out_offset = 0;
1936 rtx value = 0;
1937
1938 /* If operands exceed a word, we can't use either of them
1939 unless they have the same size. */
1940 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1941 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1942 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1943 return 0;
1944
1945 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1946 respectively refers to a hard register. */
1947
1948 /* Find the inside of any subregs. */
1949 while (GET_CODE (out) == SUBREG)
1950 {
1951 if (REG_P (SUBREG_REG (out))
1952 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1953 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1954 GET_MODE (SUBREG_REG (out)),
1955 SUBREG_BYTE (out),
1956 GET_MODE (out));
1957 out = SUBREG_REG (out);
1958 }
1959 while (GET_CODE (in) == SUBREG)
1960 {
1961 if (REG_P (SUBREG_REG (in))
1962 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1963 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1964 GET_MODE (SUBREG_REG (in)),
1965 SUBREG_BYTE (in),
1966 GET_MODE (in));
1967 in = SUBREG_REG (in);
1968 }
1969
1970 /* Narrow down the reg class, the same way push_reload will;
1971 otherwise we might find a dummy now, but push_reload won't. */
1972 {
1973 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1974 if (preferred_class != NO_REGS)
1975 rclass = (enum reg_class) preferred_class;
1976 }
1977
1978 /* See if OUT will do. */
1979 if (REG_P (out)
1980 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1981 {
1982 unsigned int regno = REGNO (out) + out_offset;
1983 unsigned int nwords = hard_regno_nregs (regno, outmode);
1984 rtx saved_rtx;
1985
1986 /* When we consider whether the insn uses OUT,
1987 ignore references within IN. They don't prevent us
1988 from copying IN into OUT, because those refs would
1989 move into the insn that reloads IN.
1990
1991 However, we only ignore IN in its role as this reload.
1992 If the insn uses IN elsewhere and it contains OUT,
1993 that counts. We can't be sure it's the "same" operand
1994 so it might not go through this reload.
1995
1996 We also need to avoid using OUT if it, or part of it, is a
1997 fixed register. Modifying such registers, even transiently,
1998 may have undefined effects on the machine, such as modifying
1999 the stack pointer. */
2000 saved_rtx = *inloc;
2001 *inloc = const0_rtx;
2002
2003 if (regno < FIRST_PSEUDO_REGISTER
2004 && targetm.hard_regno_mode_ok (regno, outmode)
2005 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2006 PATTERN (this_insn), outloc))
2007 {
2008 unsigned int i;
2009
2010 for (i = 0; i < nwords; i++)
2011 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2012 regno + i)
2013 || fixed_regs[regno + i])
2014 break;
2015
2016 if (i == nwords)
2017 {
2018 if (REG_P (real_out))
2019 value = real_out;
2020 else
2021 value = gen_rtx_REG (outmode, regno);
2022 }
2023 }
2024
2025 *inloc = saved_rtx;
2026 }
2027
2028 /* Consider using IN if OUT was not acceptable
2029 or if OUT dies in this insn (like the quotient in a divmod insn).
2030 We can't use IN unless it dies in this insn,
2031 which means we must know accurately which hard regs are live.
2032 Also, the result can't go in IN if IN is used within OUT,
2033 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2034 if (hard_regs_live_known
2035 && REG_P (in)
2036 && REGNO (in) < FIRST_PSEUDO_REGISTER
2037 && (value == 0
2038 || find_reg_note (this_insn, REG_UNUSED, real_out))
2039 && find_reg_note (this_insn, REG_DEAD, real_in)
2040 && !fixed_regs[REGNO (in)]
2041 && targetm.hard_regno_mode_ok (REGNO (in),
2042 /* The only case where out and real_out
2043 might have different modes is where
2044 real_out is a subreg, and in that
2045 case, out has a real mode. */
2046 (GET_MODE (out) != VOIDmode
2047 ? GET_MODE (out) : outmode))
2048 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2049 /* However only do this if we can be sure that this input
2050 operand doesn't correspond with an uninitialized pseudo.
2051 global can assign some hardreg to it that is the same as
2052 the one assigned to a different, also live pseudo (as it
2053 can ignore the conflict). We must never introduce writes
2054 to such hardregs, as they would clobber the other live
2055 pseudo. See PR 20973. */
2056 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2057 ORIGINAL_REGNO (in))
2058 /* Similarly, only do this if we can be sure that the death
2059 note is still valid. global can assign some hardreg to
2060 the pseudo referenced in the note and simultaneously a
2061 subword of this hardreg to a different, also live pseudo,
2062 because only another subword of the hardreg is actually
2063 used in the insn. This cannot happen if the pseudo has
2064 been assigned exactly one hardreg. See PR 33732. */
2065 && REG_NREGS (in) == 1)))
2066 {
2067 unsigned int regno = REGNO (in) + in_offset;
2068 unsigned int nwords = hard_regno_nregs (regno, inmode);
2069
2070 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2071 && ! hard_reg_set_here_p (regno, regno + nwords,
2072 PATTERN (this_insn))
2073 && (! earlyclobber
2074 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2075 PATTERN (this_insn), inloc)))
2076 {
2077 unsigned int i;
2078
2079 for (i = 0; i < nwords; i++)
2080 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2081 regno + i))
2082 break;
2083
2084 if (i == nwords)
2085 {
2086 /* If we were going to use OUT as the reload reg
2087 and changed our mind, it means OUT is a dummy that
2088 dies here. So don't bother copying value to it. */
2089 if (for_real >= 0 && value == real_out)
2090 rld[for_real].out = 0;
2091 if (REG_P (real_in))
2092 value = real_in;
2093 else
2094 value = gen_rtx_REG (inmode, regno);
2095 }
2096 }
2097 }
2098
2099 return value;
2100 }
2101 \f
2102 /* This page contains subroutines used mainly for determining
2103 whether the IN or an OUT of a reload can serve as the
2104 reload register. */
2105
2106 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2107
2108 int
2109 earlyclobber_operand_p (rtx x)
2110 {
2111 int i;
2112
2113 for (i = 0; i < n_earlyclobbers; i++)
2114 if (reload_earlyclobbers[i] == x)
2115 return 1;
2116
2117 return 0;
2118 }
2119
2120 /* Return 1 if expression X alters a hard reg in the range
2121 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2122 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2123 X should be the body of an instruction. */
2124
2125 static int
2126 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2127 {
2128 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2129 {
2130 rtx op0 = SET_DEST (x);
2131
2132 while (GET_CODE (op0) == SUBREG)
2133 op0 = SUBREG_REG (op0);
2134 if (REG_P (op0))
2135 {
2136 unsigned int r = REGNO (op0);
2137
2138 /* See if this reg overlaps range under consideration. */
2139 if (r < end_regno
2140 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2141 return 1;
2142 }
2143 }
2144 else if (GET_CODE (x) == PARALLEL)
2145 {
2146 int i = XVECLEN (x, 0) - 1;
2147
2148 for (; i >= 0; i--)
2149 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2150 return 1;
2151 }
2152
2153 return 0;
2154 }
2155
2156 /* Return 1 if ADDR is a valid memory address for mode MODE
2157 in address space AS, and check that each pseudo reg has the
2158 proper kind of hard reg. */
2159
2160 int
2161 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2162 rtx addr, addr_space_t as)
2163 {
2164 #ifdef GO_IF_LEGITIMATE_ADDRESS
2165 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2166 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2167 return 0;
2168
2169 win:
2170 return 1;
2171 #else
2172 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2173 #endif
2174 }
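/* Note that the 1 passed to the target hook above is the "strict"
   flag: only hard registers of the proper class (or pseudos that have
   already been assigned such hard registers) are accepted as base or
   index registers, which is exactly what reload needs to verify. */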
2175 \f
2176 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2177 if they are the same hard reg, and has special hacks for
2178 autoincrement and autodecrement.
2179 This is specifically intended for find_reloads to use
2180 in determining whether two operands match.
2181 X is the operand whose number is the lower of the two.
2182
2183 The value is 2 if Y contains a pre-increment that matches
2184 a non-incrementing address in X. */
2185
2186 /* ??? To be completely correct, we should arrange to pass
2187 for X the output operand and for Y the input operand.
2188 For now, we assume that the output operand has the lower number
2189 because that is natural in (SET output (... input ...)). */
2190
2191 int
2192 operands_match_p (rtx x, rtx y)
2193 {
2194 int i;
2195 RTX_CODE code = GET_CODE (x);
2196 const char *fmt;
2197 int success_2;
2198
2199 if (x == y)
2200 return 1;
2201 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2202 && (REG_P (y) || (GET_CODE (y) == SUBREG
2203 && REG_P (SUBREG_REG (y)))))
2204 {
2205 int j;
2206
2207 if (code == SUBREG)
2208 {
2209 i = REGNO (SUBREG_REG (x));
2210 if (i >= FIRST_PSEUDO_REGISTER)
2211 goto slow;
2212 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2213 GET_MODE (SUBREG_REG (x)),
2214 SUBREG_BYTE (x),
2215 GET_MODE (x));
2216 }
2217 else
2218 i = REGNO (x);
2219
2220 if (GET_CODE (y) == SUBREG)
2221 {
2222 j = REGNO (SUBREG_REG (y));
2223 if (j >= FIRST_PSEUDO_REGISTER)
2224 goto slow;
2225 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2226 GET_MODE (SUBREG_REG (y)),
2227 SUBREG_BYTE (y),
2228 GET_MODE (y));
2229 }
2230 else
2231 j = REGNO (y);
2232
2233 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2234 multiple hard register group of scalar integer registers, so that
2235 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2236 register. */
2237 scalar_int_mode xmode;
2238 if (REG_WORDS_BIG_ENDIAN
2239 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2240 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2241 && i < FIRST_PSEUDO_REGISTER)
2242 i += hard_regno_nregs (i, xmode) - 1;
2243 scalar_int_mode ymode;
2244 if (REG_WORDS_BIG_ENDIAN
2245 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2246 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2247 && j < FIRST_PSEUDO_REGISTER)
2248 j += hard_regno_nregs (j, ymode) - 1;
2249
2250 return i == j;
2251 }
2252 /* If two operands must match, because they are really a single
2253 operand of an assembler insn, then two postincrements are invalid
2254 because the assembler insn would increment only once.
2255 On the other hand, a postincrement matches ordinary indexing
2256 if the postincrement is the output operand. */
2257 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2258 return operands_match_p (XEXP (x, 0), y);
2259 /* Two preincrements are invalid
2260 because the assembler insn would increment only once.
2261 On the other hand, a preincrement matches ordinary indexing
2262 if the preincrement is the input operand.
2263 In this case, return 2, since some callers need to do special
2264 things when this happens. */
2265 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2266 || GET_CODE (y) == PRE_MODIFY)
2267 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2268
2269 slow:
2270
2271 /* Now we have disposed of all the cases in which different rtx codes
2272 can match. */
2273 if (code != GET_CODE (y))
2274 return 0;
2275
2276 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2277 if (GET_MODE (x) != GET_MODE (y))
2278 return 0;
2279
2280 /* MEMs referring to different address space are not equivalent. */
2281 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2282 return 0;
2283
2284 switch (code)
2285 {
2286 CASE_CONST_UNIQUE:
2287 return 0;
2288
2289 case LABEL_REF:
2290 return label_ref_label (x) == label_ref_label (y);
2291 case SYMBOL_REF:
2292 return XSTR (x, 0) == XSTR (y, 0);
2293
2294 default:
2295 break;
2296 }
2297
2298 /* Compare the elements. If any pair of corresponding elements
2299 fails to match, return 0 for the whole thing. */
2300
2301 success_2 = 0;
2302 fmt = GET_RTX_FORMAT (code);
2303 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2304 {
2305 int val, j;
2306 switch (fmt[i])
2307 {
2308 case 'w':
2309 if (XWINT (x, i) != XWINT (y, i))
2310 return 0;
2311 break;
2312
2313 case 'i':
2314 if (XINT (x, i) != XINT (y, i))
2315 return 0;
2316 break;
2317
2318 case 'p':
2319 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2320 return 0;
2321 break;
2322
2323 case 'e':
2324 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2325 if (val == 0)
2326 return 0;
2327 /* If any subexpression returns 2,
2328 we should return 2 if we are successful. */
2329 if (val == 2)
2330 success_2 = 1;
2331 break;
2332
2333 case '0':
2334 break;
2335
2336 case 'E':
2337 if (XVECLEN (x, i) != XVECLEN (y, i))
2338 return 0;
2339 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2340 {
2341 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2342 if (val == 0)
2343 return 0;
2344 if (val == 2)
2345 success_2 = 1;
2346 }
2347 break;
2348
2349 /* It is believed that rtx's at this level will never
2350 contain anything but integers and other rtx's,
2351 except for within LABEL_REFs and SYMBOL_REFs. */
2352 default:
2353 gcc_unreachable ();
2354 }
2355 }
2356 return 1 + success_2;
2357 }
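/* Illustrative sketch (hypothetical, not compiled): the return-value
   conventions of operands_match_p. A plain match yields 1; a match
   that goes through a pre-increment in the second operand yields 2,
   which callers must treat specially because the increment then has to
   be emitted explicitly. */
#if 0
static void
operands_match_p_example (void)
{
  rtx r3 = gen_rtx_REG (SImode, 3);
  rtx pre = gen_rtx_PRE_INC (SImode, gen_rtx_REG (SImode, 3));

  gcc_assert (operands_match_p (r3, r3) == 1);
  gcc_assert (operands_match_p (r3, pre) == 2);
}
#endif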
2358 \f
2359 /* Describe the range of registers or memory referenced by X.
2360 If X is a register, set REG_FLAG and put the first register
2361 number into START and the last plus one into END.
2362 If X is a memory reference, put a base address into BASE
2363 and a range of integer offsets into START and END.
2364 If X is pushing on the stack, we can assume it causes no trouble,
2365 so we set the SAFE field. */
2366
2367 static struct decomposition
2368 decompose (rtx x)
2369 {
2370 struct decomposition val;
2371 int all_const = 0, regno;
2372
2373 memset (&val, 0, sizeof (val));
2374
2375 switch (GET_CODE (x))
2376 {
2377 case MEM:
2378 {
2379 rtx base = NULL_RTX, offset = 0;
2380 rtx addr = XEXP (x, 0);
2381
2382 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2383 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2384 {
2385 val.base = XEXP (addr, 0);
2386 val.start = -GET_MODE_SIZE (GET_MODE (x));
2387 val.end = GET_MODE_SIZE (GET_MODE (x));
2388 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2389 return val;
2390 }
2391
2392 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2393 {
2394 if (GET_CODE (XEXP (addr, 1)) == PLUS
2395 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2396 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2397 {
2398 val.base = XEXP (addr, 0);
2399 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2400 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2401 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2402 return val;
2403 }
2404 }
2405
2406 if (GET_CODE (addr) == CONST)
2407 {
2408 addr = XEXP (addr, 0);
2409 all_const = 1;
2410 }
2411 if (GET_CODE (addr) == PLUS)
2412 {
2413 if (CONSTANT_P (XEXP (addr, 0)))
2414 {
2415 base = XEXP (addr, 1);
2416 offset = XEXP (addr, 0);
2417 }
2418 else if (CONSTANT_P (XEXP (addr, 1)))
2419 {
2420 base = XEXP (addr, 0);
2421 offset = XEXP (addr, 1);
2422 }
2423 }
2424
2425 if (offset == 0)
2426 {
2427 base = addr;
2428 offset = const0_rtx;
2429 }
2430 if (GET_CODE (offset) == CONST)
2431 offset = XEXP (offset, 0);
2432 if (GET_CODE (offset) == PLUS)
2433 {
2434 if (CONST_INT_P (XEXP (offset, 0)))
2435 {
2436 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2437 offset = XEXP (offset, 0);
2438 }
2439 else if (CONST_INT_P (XEXP (offset, 1)))
2440 {
2441 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2442 offset = XEXP (offset, 1);
2443 }
2444 else
2445 {
2446 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2447 offset = const0_rtx;
2448 }
2449 }
2450 else if (!CONST_INT_P (offset))
2451 {
2452 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2453 offset = const0_rtx;
2454 }
2455
2456 if (all_const && GET_CODE (base) == PLUS)
2457 base = gen_rtx_CONST (GET_MODE (base), base);
2458
2459 gcc_assert (CONST_INT_P (offset));
2460
2461 val.start = INTVAL (offset);
2462 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2463 val.base = base;
2464 }
2465 break;
2466
2467 case REG:
2468 val.reg_flag = 1;
2469 regno = true_regnum (x);
2470 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2471 {
2472 /* A pseudo with no hard reg. */
2473 val.start = REGNO (x);
2474 val.end = val.start + 1;
2475 }
2476 else
2477 {
2478 /* A hard reg. */
2479 val.start = regno;
2480 val.end = end_hard_regno (GET_MODE (x), regno);
2481 }
2482 break;
2483
2484 case SUBREG:
2485 if (!REG_P (SUBREG_REG (x)))
2486 /* This could be more precise, but it's good enough. */
2487 return decompose (SUBREG_REG (x));
2488 regno = true_regnum (x);
2489 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2490 return decompose (SUBREG_REG (x));
2491
2492 /* A hard reg. */
2493 val.reg_flag = 1;
2494 val.start = regno;
2495 val.end = regno + subreg_nregs (x);
2496 break;
2497
2498 case SCRATCH:
2499 /* This hasn't been assigned yet, so it can't conflict yet. */
2500 val.safe = 1;
2501 break;
2502
2503 default:
2504 gcc_assert (CONSTANT_P (x));
2505 val.safe = 1;
2506 break;
2507 }
2508 return val;
2509 }
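/* For example, decomposing (mem:SI (plus:SI (reg:SI fp) (const_int 8)))
   yields base = (reg:SI fp), start = 8, end = 12 (SImode is four bytes
   wide), while decomposing a pseudo REG that did not get a hard
   register yields reg_flag = 1, start = REGNO and end = REGNO + 1. */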
2510
2511 /* Return 1 if altering Y will not modify the value of X.
2512 Y is also described by YDATA, which should be decompose (Y). */
2513
2514 static int
2515 immune_p (rtx x, rtx y, struct decomposition ydata)
2516 {
2517 struct decomposition xdata;
2518
2519 if (ydata.reg_flag)
2520 /* In this case the decomposition structure contains register
2521 numbers rather than byte offsets. */
2522 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2523 ydata.end.to_constant (),
2524 x, (rtx *) 0);
2525 if (ydata.safe)
2526 return 1;
2527
2528 gcc_assert (MEM_P (y));
2529 /* If Y is memory and X is not, Y can't affect X. */
2530 if (!MEM_P (x))
2531 return 1;
2532
2533 xdata = decompose (x);
2534
2535 if (! rtx_equal_p (xdata.base, ydata.base))
2536 {
2537 /* If bases are distinct symbolic constants, there is no overlap. */
2538 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2539 return 1;
2540 /* Constants and stack slots never overlap. */
2541 if (CONSTANT_P (xdata.base)
2542 && (ydata.base == frame_pointer_rtx
2543 || ydata.base == hard_frame_pointer_rtx
2544 || ydata.base == stack_pointer_rtx))
2545 return 1;
2546 if (CONSTANT_P (ydata.base)
2547 && (xdata.base == frame_pointer_rtx
2548 || xdata.base == hard_frame_pointer_rtx
2549 || xdata.base == stack_pointer_rtx))
2550 return 1;
2551 /* If either base is variable, we don't know anything. */
2552 return 0;
2553 }
2554
2555 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2556 }
2557
2558 /* Similar, but calls decompose. */
2559
2560 int
2561 safe_from_earlyclobber (rtx op, rtx clobber)
2562 {
2563 struct decomposition early_data;
2564
2565 early_data = decompose (clobber);
2566 return immune_p (op, clobber, early_data);
2567 }
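/* Illustrative sketch (not compiled): when both references are memory
   with equal bases, immune_p reduces to disjointness of the byte
   ranges [start, end), as in this standalone predicate with
   HOST_WIDE_INT standing in for poly_int64. */
#if 0
static int
byte_ranges_disjoint_p (HOST_WIDE_INT xstart, HOST_WIDE_INT xend,
                        HOST_WIDE_INT ystart, HOST_WIDE_INT yend)
{
  /* [xstart, xend) and [ystart, yend) overlap unless one range ends
     before the other begins. */
  return xstart >= yend || ystart >= xend;
}
#endif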
2568 \f
2569 /* Main entry point of this file: search the body of INSN
2570 for values that need reloading and record them with push_reload.
2571 REPLACE nonzero means record also where the values occur
2572 so that subst_reloads can be used.
2573
2574 IND_LEVELS says how many levels of indirection are supported by this
2575 machine; a value of zero means that a memory reference is not a valid
2576 memory address.
2577
2578 LIVE_KNOWN says we have valid information about which hard
2579 regs are live at each point in the program; this is true when
2580 we are called from global_alloc but false when stupid register
2581 allocation has been done.
2582
2583 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2584 an element is nonnegative if that reg has been commandeered for reloading into.
2585 It is copied into STATIC_RELOAD_REG_P and referenced from there
2586 by various subroutines.
2587
2588 Return TRUE if some operands need to be changed, because of swapping
2589 commutative operands, reg_equiv_address substitution, or whatever. */
2590
2591 int
2592 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2593 short *reload_reg_p)
2594 {
2595 int insn_code_number;
2596 int i, j;
2597 int noperands;
2598 /* These start out as the constraints for the insn
2599 and they are chewed up as we consider alternatives. */
2600 const char *constraints[MAX_RECOG_OPERANDS];
2601 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2602 a register. */
2603 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2604 char pref_or_nothing[MAX_RECOG_OPERANDS];
2605 /* Nonzero for a MEM operand whose entire address needs a reload.
2606 May be -1 to indicate the entire address may or may not need a reload. */
2607 int address_reloaded[MAX_RECOG_OPERANDS];
2608 /* Nonzero for an address operand that needs to be completely reloaded.
2609 May be -1 to indicate the entire operand may or may not need a reload. */
2610 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2611 /* Value of enum reload_type to use for operand. */
2612 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2613 /* Value of enum reload_type to use within address of operand. */
2614 enum reload_type address_type[MAX_RECOG_OPERANDS];
2615 /* Save the usage of each operand. */
2616 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2617 int no_input_reloads = 0, no_output_reloads = 0;
2618 int n_alternatives;
2619 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2620 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2621 char this_alternative_win[MAX_RECOG_OPERANDS];
2622 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2623 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2624 int this_alternative_matches[MAX_RECOG_OPERANDS];
2625 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2626 int this_alternative_number;
2627 int goal_alternative_number = 0;
2628 int operand_reloadnum[MAX_RECOG_OPERANDS];
2629 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2630 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2631 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2632 char goal_alternative_win[MAX_RECOG_OPERANDS];
2633 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2634 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2635 int goal_alternative_swapped;
2636 int best;
2637 int commutative;
2638 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2639 rtx substed_operand[MAX_RECOG_OPERANDS];
2640 rtx body = PATTERN (insn);
2641 rtx set = single_set (insn);
2642 int goal_earlyclobber = 0, this_earlyclobber;
2643 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2644 int retval = 0;
2645
2646 this_insn = insn;
2647 n_reloads = 0;
2648 n_replacements = 0;
2649 n_earlyclobbers = 0;
2650 replace_reloads = replace;
2651 hard_regs_live_known = live_known;
2652 static_reload_reg_p = reload_reg_p;
2653
2654 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2655 neither are insns that SET cc0. Insns that use CC0 are not allowed
2656 to have any input reloads. */
2657 if (JUMP_P (insn) || CALL_P (insn))
2658 no_output_reloads = 1;
2659
2660 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2661 no_input_reloads = 1;
2662 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2663 no_output_reloads = 1;
2664
2665 /* The eliminated forms of any secondary memory locations are per-insn, so
2666 clear them out here. */
2667
2668 if (secondary_memlocs_elim_used)
2669 {
2670 memset (secondary_memlocs_elim, 0,
2671 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2672 secondary_memlocs_elim_used = 0;
2673 }
2674
2675 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2676 is cheap to move between them. If it is not, there may not be an insn
2677 to do the copy, so we may need a reload. */
2678 if (GET_CODE (body) == SET
2679 && REG_P (SET_DEST (body))
2680 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2681 && REG_P (SET_SRC (body))
2682 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2683 && register_move_cost (GET_MODE (SET_SRC (body)),
2684 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2685 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2686 return 0;
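/* (A register_move_cost of 2 is the conventional cost of a single move
   insn, so a larger value is taken to mean that a plain copy insn may
   not exist and normal operand processing is still required.) */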
2687
2688 extract_insn (insn);
2689
2690 noperands = reload_n_operands = recog_data.n_operands;
2691 n_alternatives = recog_data.n_alternatives;
2692
2693 /* Just return "no reloads" if insn has no operands with constraints. */
2694 if (noperands == 0 || n_alternatives == 0)
2695 return 0;
2696
2697 insn_code_number = INSN_CODE (insn);
2698 this_insn_is_asm = insn_code_number < 0;
2699
2700 memcpy (operand_mode, recog_data.operand_mode,
2701 noperands * sizeof (machine_mode));
2702 memcpy (constraints, recog_data.constraints,
2703 noperands * sizeof (const char *));
2704
2705 commutative = -1;
2706
2707 /* If we will need to know, later, whether some pair of operands
2708 are the same, we must compare them now and save the result.
2709 Reloading the base and index registers will clobber them
2710 and afterward they will fail to match. */
2711
2712 for (i = 0; i < noperands; i++)
2713 {
2714 const char *p;
2715 int c;
2716 char *end;
2717
2718 substed_operand[i] = recog_data.operand[i];
2719 p = constraints[i];
2720
2721 modified[i] = RELOAD_READ;
2722
2723 /* Scan this operand's constraint to see if it is an output operand,
2724 an in-out operand, is commutative, or should match another. */
2725
2726 while ((c = *p))
2727 {
2728 p += CONSTRAINT_LEN (c, p);
2729 switch (c)
2730 {
2731 case '=':
2732 modified[i] = RELOAD_WRITE;
2733 break;
2734 case '+':
2735 modified[i] = RELOAD_READ_WRITE;
2736 break;
2737 case '%':
2738 {
2739 /* The last operand should not be marked commutative. */
2740 gcc_assert (i != noperands - 1);
2741
2742 /* We currently only support one commutative pair of
2743 operands. Some existing asm code currently uses more
2744 than one pair. Previously, that would usually work,
2745 but sometimes it would crash the compiler. We
2746 continue supporting that case as well as we can by
2747 silently ignoring all but the first pair. In the
2748 future we may handle it correctly. */
2749 if (commutative < 0)
2750 commutative = i;
2751 else
2752 gcc_assert (this_insn_is_asm);
2753 }
2754 break;
2755 /* Use of ISDIGIT is tempting here, but it may get expensive because
2756 of locale support we don't want. */
2757 case '0': case '1': case '2': case '3': case '4':
2758 case '5': case '6': case '7': case '8': case '9':
2759 {
2760 c = strtoul (p - 1, &end, 10);
2761 p = end;
2762
2763 operands_match[c][i]
2764 = operands_match_p (recog_data.operand[c],
2765 recog_data.operand[i]);
2766
2767 /* An operand may not match itself. */
2768 gcc_assert (c != i);
2769
2770 /* If C can be commuted with C+1, and C might need to match I,
2771 then C+1 might also need to match I. */
2772 if (commutative >= 0)
2773 {
2774 if (c == commutative || c == commutative + 1)
2775 {
2776 int other = c + (c == commutative ? 1 : -1);
2777 operands_match[other][i]
2778 = operands_match_p (recog_data.operand[other],
2779 recog_data.operand[i]);
2780 }
2781 if (i == commutative || i == commutative + 1)
2782 {
2783 int other = i + (i == commutative ? 1 : -1);
2784 operands_match[c][other]
2785 = operands_match_p (recog_data.operand[c],
2786 recog_data.operand[other]);
2787 }
2788 /* Note that C is supposed to be less than I.
2789 No need to consider altering both C and I because in
2790 that case we would alter one into the other. */
2791 }
2792 }
2793 }
2794 }
2795 }
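/* For example (hypothetical pattern): with constraints "=r" for
   operand 0, "%0" for operand 1 and "r" for operand 2, operand 0 is
   marked RELOAD_WRITE, operand 1 is recorded as commutative with
   operand 2 and must match operand 0, so both operands_match[0][1] and
   operands_match[0][2] are computed by the loop above. */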
2796
2797 /* Examine each operand that is a memory reference or memory address
2798 and reload parts of the addresses into index registers.
2799 Also here any references to pseudo regs that didn't get hard regs
2800 but are equivalent to constants get replaced in the insn itself
2801 with those constants. Nobody will ever see them again.
2802
2803 Finally, set up the preferred classes of each operand. */
2804
2805 for (i = 0; i < noperands; i++)
2806 {
2807 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2808
2809 address_reloaded[i] = 0;
2810 address_operand_reloaded[i] = 0;
2811 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2812 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2813 : RELOAD_OTHER);
2814 address_type[i]
2815 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2816 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2817 : RELOAD_OTHER);
2818
2819 if (*constraints[i] == 0)
2820 /* Ignore things like match_operator operands. */
2821 ;
2822 else if (insn_extra_address_constraint
2823 (lookup_constraint (constraints[i])))
2824 {
2825 address_operand_reloaded[i]
2826 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2827 recog_data.operand[i],
2828 recog_data.operand_loc[i],
2829 i, operand_type[i], ind_levels, insn);
2830
2831 /* If we now have a simple operand where we used to have a
2832 PLUS or MULT, re-recognize and try again. */
2833 if ((OBJECT_P (*recog_data.operand_loc[i])
2834 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2835 && (GET_CODE (recog_data.operand[i]) == MULT
2836 || GET_CODE (recog_data.operand[i]) == PLUS))
2837 {
2838 INSN_CODE (insn) = -1;
2839 retval = find_reloads (insn, replace, ind_levels, live_known,
2840 reload_reg_p);
2841 return retval;
2842 }
2843
2844 recog_data.operand[i] = *recog_data.operand_loc[i];
2845 substed_operand[i] = recog_data.operand[i];
2846
2847 /* Address operands are reloaded in their existing mode,
2848 no matter what is specified in the machine description. */
2849 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2850
2851 /* If the address is a single CONST_INT, pick the address mode
2852 instead; otherwise we will later not know in which mode
2853 the reload should be performed. */
2854 if (operand_mode[i] == VOIDmode)
2855 operand_mode[i] = Pmode;
2856
2857 }
2858 else if (code == MEM)
2859 {
2860 address_reloaded[i]
2861 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2862 recog_data.operand_loc[i],
2863 XEXP (recog_data.operand[i], 0),
2864 &XEXP (recog_data.operand[i], 0),
2865 i, address_type[i], ind_levels, insn);
2866 recog_data.operand[i] = *recog_data.operand_loc[i];
2867 substed_operand[i] = recog_data.operand[i];
2868 }
2869 else if (code == SUBREG)
2870 {
2871 rtx reg = SUBREG_REG (recog_data.operand[i]);
2872 rtx op
2873 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2874 ind_levels,
2875 set != 0
2876 && &SET_DEST (set) == recog_data.operand_loc[i],
2877 insn,
2878 &address_reloaded[i]);
2879
2880 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2881 that didn't get a hard register, emit a USE with a REG_EQUAL
2882 note in front so that we might inherit a previous, possibly
2883 wider reload. */
2884
2885 if (replace
2886 && MEM_P (op)
2887 && REG_P (reg)
2888 && (GET_MODE_SIZE (GET_MODE (reg))
2889 >= GET_MODE_SIZE (GET_MODE (op)))
2890 && reg_equiv_constant (REGNO (reg)) == 0)
2891 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2892 insn),
2893 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2894
2895 substed_operand[i] = recog_data.operand[i] = op;
2896 }
2897 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2898 /* We can get a PLUS as an "operand" as a result of register
2899 elimination. See eliminate_regs and gen_reload. We handle
2900 a unary operator by reloading the operand. */
2901 substed_operand[i] = recog_data.operand[i]
2902 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2903 ind_levels, 0, insn,
2904 &address_reloaded[i]);
2905 else if (code == REG)
2906 {
2907 /* This is equivalent to calling find_reloads_toplev.
2908 The code is duplicated for speed.
2909 When we find a pseudo always equivalent to a constant,
2910 we replace it by the constant. We must be sure, however,
2911 that we don't try to replace it in the insn in which it
2912 is being set. */
2913 int regno = REGNO (recog_data.operand[i]);
2914 if (reg_equiv_constant (regno) != 0
2915 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2916 {
2917 /* Record the existing mode so that the check if constants are
2918 allowed will work when operand_mode isn't specified. */
2919
2920 if (operand_mode[i] == VOIDmode)
2921 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2922
2923 substed_operand[i] = recog_data.operand[i]
2924 = reg_equiv_constant (regno);
2925 }
2926 if (reg_equiv_memory_loc (regno) != 0
2927 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2928 /* We need not give a valid is_set_dest argument since the case
2929 of a constant equivalence was checked above. */
2930 substed_operand[i] = recog_data.operand[i]
2931 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2932 ind_levels, 0, insn,
2933 &address_reloaded[i]);
2934 }
2935 /* If the operand is still a register (we didn't replace it with an
2936 equivalent), get the preferred class to reload it into. */
2937 code = GET_CODE (recog_data.operand[i]);
2938 preferred_class[i]
2939 = ((code == REG && REGNO (recog_data.operand[i])
2940 >= FIRST_PSEUDO_REGISTER)
2941 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2942 : NO_REGS);
2943 pref_or_nothing[i]
2944 = (code == REG
2945 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2946 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2947 }
2948
2949 /* If this is simply a copy from operand 1 to operand 0, merge the
2950 preferred classes for the operands. */
2951 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2952 && recog_data.operand[1] == SET_SRC (set))
2953 {
2954 preferred_class[0] = preferred_class[1]
2955 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2956 pref_or_nothing[0] |= pref_or_nothing[1];
2957 pref_or_nothing[1] |= pref_or_nothing[0];
2958 }
2959
2960 /* Now see what we need for pseudo-regs that didn't get hard regs
2961 or got the wrong kind of hard reg. For this, we must consider
2962 all the operands together against the register constraints. */
2963
2964 best = MAX_RECOG_OPERANDS * 2 + 600;
2965
2966 goal_alternative_swapped = 0;
2967
2968 /* The constraints are made of several alternatives.
2969 Each operand's constraint looks like foo,bar,... with commas
2970 separating the alternatives. The first alternatives for all
2971 operands go together, the second alternatives go together, etc.
2972
2973 First loop over alternatives. */
2974
2975 alternative_mask enabled = get_enabled_alternatives (insn);
2976 for (this_alternative_number = 0;
2977 this_alternative_number < n_alternatives;
2978 this_alternative_number++)
2979 {
2980 int swapped;
2981
2982 if (!TEST_BIT (enabled, this_alternative_number))
2983 {
2984 int i;
2985
2986 for (i = 0; i < recog_data.n_operands; i++)
2987 constraints[i] = skip_alternative (constraints[i]);
2988
2989 continue;
2990 }
2991
2992 /* If insn is commutative (it's safe to exchange a certain pair
2993 of operands) then we need to try each alternative twice, the
2994 second time matching those two operands as if we had
2995 exchanged them. To do this, really exchange them in
2996 operands. */
2997 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
2998 {
2999 /* Loop over operands for one constraint alternative. */
3000 /* LOSERS counts those that don't fit this alternative
3001 and would require loading. */
3002 int losers = 0;
3003 /* BAD is set to 1 if some operand can't fit this alternative
3004 even after reloading. */
3005 int bad = 0;
3006 /* REJECT is a count of how undesirable this alternative says it is
3007 if any reloading is required. If the alternative matches exactly
3008 then REJECT is ignored, but otherwise it gets this much
3009 counted against it in addition to the reloading needed. Each
3010 ? counts three times here since we want the disparaging caused by
3011 a bad register class to only count 1/3 as much. */
3012 int reject = 0;
3013
3014 if (swapped)
3015 {
3016 recog_data.operand[commutative] = substed_operand[commutative + 1];
3017 recog_data.operand[commutative + 1] = substed_operand[commutative];
3018 /* Swap the duplicates too. */
3019 for (i = 0; i < recog_data.n_dups; i++)
3020 if (recog_data.dup_num[i] == commutative
3021 || recog_data.dup_num[i] == commutative + 1)
3022 *recog_data.dup_loc[i]
3023 = recog_data.operand[(int) recog_data.dup_num[i]];
3024
3025 std::swap (preferred_class[commutative],
3026 preferred_class[commutative + 1]);
3027 std::swap (pref_or_nothing[commutative],
3028 pref_or_nothing[commutative + 1]);
3029 std::swap (address_reloaded[commutative],
3030 address_reloaded[commutative + 1]);
3031 }
3032
3033 this_earlyclobber = 0;
3034
3035 for (i = 0; i < noperands; i++)
3036 {
3037 const char *p = constraints[i];
3038 char *end;
3039 int len;
3040 int win = 0;
3041 int did_match = 0;
3042 /* 0 => this operand can be reloaded somehow for this alternative. */
3043 int badop = 1;
3044 /* 0 => this operand can be reloaded if the alternative allows regs. */
3045 int winreg = 0;
3046 int c;
3047 int m;
3048 rtx operand = recog_data.operand[i];
3049 int offset = 0;
3050 /* Nonzero means this is a MEM that must be reloaded into a reg
3051 regardless of what the constraint says. */
3052 int force_reload = 0;
3053 int offmemok = 0;
3054 /* Nonzero if a constant forced into memory would be OK for this
3055 operand. */
3056 int constmemok = 0;
3057 int earlyclobber = 0;
3058 enum constraint_num cn;
3059 enum reg_class cl;
3060
3061 /* If the predicate accepts a unary operator, it means that
3062 we need to reload the operand, but do not do this for
3063 match_operator and friends. */
3064 if (UNARY_P (operand) && *p != 0)
3065 operand = XEXP (operand, 0);
3066
3067 /* If the operand is a SUBREG, extract
3068 the REG or MEM (or maybe even a constant) within.
3069 (Constants can occur as a result of reg_equiv_constant.) */
3070
3071 while (GET_CODE (operand) == SUBREG)
3072 {
3073 /* Offset only matters when operand is a REG and
3074 it is a hard reg. This is because it is passed
3075 to reg_fits_class_p if it is a REG and all pseudos
3076 return 0 from that function. */
3077 if (REG_P (SUBREG_REG (operand))
3078 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3079 {
3080 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3081 GET_MODE (SUBREG_REG (operand)),
3082 SUBREG_BYTE (operand),
3083 GET_MODE (operand)) < 0)
3084 force_reload = 1;
3085 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3086 GET_MODE (SUBREG_REG (operand)),
3087 SUBREG_BYTE (operand),
3088 GET_MODE (operand));
3089 }
3090 operand = SUBREG_REG (operand);
3091 /* Force reload if this is a constant or PLUS or if there may
3092 be a problem accessing OPERAND in the outer mode. */
3093 scalar_int_mode inner_mode;
3094 if (CONSTANT_P (operand)
3095 || GET_CODE (operand) == PLUS
3096 /* We must force a reload of paradoxical SUBREGs
3097 of a MEM because the alignment of the inner value
3098 may not be enough to do the outer reference. On
3099 big-endian machines, it may also reference outside
3100 the object.
3101
3102 On machines that extend byte operations, if we have a
3103 SUBREG where both the inner and outer modes are no wider
3104 than a word, the inner mode is narrower, is integral,
3105 and gets extended when loaded from memory, combine.c has
3106 made assumptions about the behavior of the machine in such
3107 register accesses. If the data is, in fact, in memory we
3108 must always load using the size assumed to be in the
3109 register and let the insn do the different-sized
3110 accesses.
3111
3112 This is doubly true if WORD_REGISTER_OPERATIONS. In
3113 this case eliminate_regs has left non-paradoxical
3114 subregs for push_reload to see. Make sure it does
3115 by forcing the reload.
3116
3117 ??? When is it right at this stage to have a subreg
3118 of a mem that is _not_ to be handled specially? IMO
3119 those should have been reduced to just a mem. */
3120 || ((MEM_P (operand)
3121 || (REG_P (operand)
3122 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3123 && (WORD_REGISTER_OPERATIONS
3124 || ((GET_MODE_BITSIZE (GET_MODE (operand))
3125 < BIGGEST_ALIGNMENT)
3126 && paradoxical_subreg_p (operand_mode[i],
3127 GET_MODE (operand)))
3128 || BYTES_BIG_ENDIAN
3129 || ((GET_MODE_SIZE (operand_mode[i])
3130 <= UNITS_PER_WORD)
3131 && (is_a <scalar_int_mode>
3132 (GET_MODE (operand), &inner_mode))
3133 && (GET_MODE_SIZE (inner_mode)
3134 <= UNITS_PER_WORD)
3135 && paradoxical_subreg_p (operand_mode[i],
3136 inner_mode)
3137 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3138 )
3139 force_reload = 1;
3140 }
3141
3142 this_alternative[i] = NO_REGS;
3143 this_alternative_win[i] = 0;
3144 this_alternative_match_win[i] = 0;
3145 this_alternative_offmemok[i] = 0;
3146 this_alternative_earlyclobber[i] = 0;
3147 this_alternative_matches[i] = -1;
3148
3149 /* An empty constraint or empty alternative
3150 allows anything which matched the pattern. */
3151 if (*p == 0 || *p == ',')
3152 win = 1, badop = 0;
3153
3154 /* Scan this alternative's specs for this operand;
3155 set WIN if the operand fits any letter in this alternative.
3156 Otherwise, clear BADOP if this operand could
3157 fit some letter after reloads,
3158 or set WINREG if this operand could fit after reloads
3159 provided the constraint allows some registers. */
3160
3161 do
3162 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3163 {
3164 case '\0':
3165 len = 0;
3166 break;
3167 case ',':
3168 c = '\0';
3169 break;
3170
3171 case '?':
3172 reject += 6;
3173 break;
3174
3175 case '!':
3176 reject = 600;
3177 break;
3178
3179 case '#':
3180 /* Ignore rest of this alternative as far as
3181 reloading is concerned. */
3182 do
3183 p++;
3184 while (*p && *p != ',');
3185 len = 0;
3186 break;
3187
3188 case '0': case '1': case '2': case '3': case '4':
3189 case '5': case '6': case '7': case '8': case '9':
3190 m = strtoul (p, &end, 10);
3191 p = end;
3192 len = 0;
3193
3194 this_alternative_matches[i] = m;
3195 /* We are supposed to match a previous operand.
3196 If we do, we win if that one did.
3197 If we do not, count both of the operands as losers.
3198 (This is too conservative, since most of the time
3199 only a single reload insn will be needed to make
3200 the two operands win. As a result, this alternative
3201 may be rejected when it is actually desirable.) */
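/* When M or I is one of the commutative pair, the index
2 * commutative + 1 - M (or - I) used below maps COMMUTATIVE to
COMMUTATIVE + 1 and vice versa, i.e. it selects the other operand
of the pair. */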
3202 if ((swapped && (m != commutative || i != commutative + 1))
3203 /* If we are matching as if two operands were swapped,
3204 also pretend that operands_match had been computed
3205 with swapped.
3206 But if I is the second of those and M is the first,
3207 don't exchange them, because operands_match is valid
3208 only on one side of its diagonal. */
3209 ? (operands_match
3210 [(m == commutative || m == commutative + 1)
3211 ? 2 * commutative + 1 - m : m]
3212 [(i == commutative || i == commutative + 1)
3213 ? 2 * commutative + 1 - i : i])
3214 : operands_match[m][i])
3215 {
3216 /* If we are matching a non-offsettable address where an
3217 offsettable address was expected, then we must reject
3218 this combination, because we can't reload it. */
3219 if (this_alternative_offmemok[m]
3220 && MEM_P (recog_data.operand[m])
3221 && this_alternative[m] == NO_REGS
3222 && ! this_alternative_win[m])
3223 bad = 1;
3224
3225 did_match = this_alternative_win[m];
3226 }
3227 else
3228 {
3229 /* Operands don't match. */
3230 rtx value;
3231 int loc1, loc2;
3232 /* Retroactively mark the operand we had to match
3233 as a loser, if it wasn't already. */
3234 if (this_alternative_win[m])
3235 losers++;
3236 this_alternative_win[m] = 0;
3237 if (this_alternative[m] == NO_REGS)
3238 bad = 1;
3239 /* But count the pair only once in the total badness of
3240 this alternative, if the pair can be a dummy reload.
3241 The pointers in operand_loc are not swapped; swap
3242 them by hand if necessary. */
3243 if (swapped && i == commutative)
3244 loc1 = commutative + 1;
3245 else if (swapped && i == commutative + 1)
3246 loc1 = commutative;
3247 else
3248 loc1 = i;
3249 if (swapped && m == commutative)
3250 loc2 = commutative + 1;
3251 else if (swapped && m == commutative + 1)
3252 loc2 = commutative;
3253 else
3254 loc2 = m;
3255 value
3256 = find_dummy_reload (recog_data.operand[i],
3257 recog_data.operand[m],
3258 recog_data.operand_loc[loc1],
3259 recog_data.operand_loc[loc2],
3260 operand_mode[i], operand_mode[m],
3261 this_alternative[m], -1,
3262 this_alternative_earlyclobber[m]);
3263
3264 if (value != 0)
3265 losers--;
3266 }
3267 /* This can be fixed with reloads if the operand
3268 we are supposed to match can be fixed with reloads. */
3269 badop = 0;
3270 this_alternative[i] = this_alternative[m];
3271
3272 /* If we have to reload this operand and some previous
3273 operand also had to match the same thing as this
3274 operand, we don't know how to do that. So reject this
3275 alternative. */
3276 if (! did_match || force_reload)
3277 for (j = 0; j < i; j++)
3278 if (this_alternative_matches[j]
3279 == this_alternative_matches[i])
3280 {
3281 badop = 1;
3282 break;
3283 }
3284 break;
3285
3286 case 'p':
3287 /* All necessary reloads for an address_operand
3288 were handled in find_reloads_address. */
3289 this_alternative[i]
3290 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3291 ADDRESS, SCRATCH);
3292 win = 1;
3293 badop = 0;
3294 break;
3295
3296 case TARGET_MEM_CONSTRAINT:
3297 if (force_reload)
3298 break;
3299 if (MEM_P (operand)
3300 || (REG_P (operand)
3301 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3302 && reg_renumber[REGNO (operand)] < 0))
3303 win = 1;
3304 if (CONST_POOL_OK_P (operand_mode[i], operand))
3305 badop = 0;
3306 constmemok = 1;
3307 break;
3308
3309 case '<':
3310 if (MEM_P (operand)
3311 && ! address_reloaded[i]
3312 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3313 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3314 win = 1;
3315 break;
3316
3317 case '>':
3318 if (MEM_P (operand)
3319 && ! address_reloaded[i]
3320 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3321 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3322 win = 1;
3323 break;
3324
3325 /* Memory operand whose address is not offsettable. */
3326 case 'V':
3327 if (force_reload)
3328 break;
3329 if (MEM_P (operand)
3330 && ! (ind_levels ? offsettable_memref_p (operand)
3331 : offsettable_nonstrict_memref_p (operand))
3332 /* Certain mem addresses will become offsettable
3333 after they themselves are reloaded. This is important;
3334 we don't want our own handling of unoffsettables
3335 to override the handling of reg_equiv_address. */
3336 && !(REG_P (XEXP (operand, 0))
3337 && (ind_levels == 0
3338 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3339 win = 1;
3340 break;
3341
3342 /* Memory operand whose address is offsettable. */
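/* On most targets a plain register-indirect or reg+constant address
is offsettable, while an autoincrement address such as (post_inc ...)
is not; the target's legitimate-address rules have the final say. */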
3343 case 'o':
3344 if (force_reload)
3345 break;
3346 if ((MEM_P (operand)
3347 /* If IND_LEVELS, find_reloads_address won't reload a
3348 pseudo that didn't get a hard reg, so we have to
3349 reject that case. */
3350 && ((ind_levels ? offsettable_memref_p (operand)
3351 : offsettable_nonstrict_memref_p (operand))
3352 /* A reloaded address is offsettable because it is now
3353 just a simple register indirect. */
3354 || address_reloaded[i] == 1))
3355 || (REG_P (operand)
3356 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3357 && reg_renumber[REGNO (operand)] < 0
3358 /* If reg_equiv_address is nonzero, we will be
3359 loading it into a register; hence it will be
3360 offsettable, but we cannot say that reg_equiv_mem
3361 is offsettable without checking. */
3362 && ((reg_equiv_mem (REGNO (operand)) != 0
3363 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3364 || (reg_equiv_address (REGNO (operand)) != 0))))
3365 win = 1;
3366 if (CONST_POOL_OK_P (operand_mode[i], operand)
3367 || MEM_P (operand))
3368 badop = 0;
3369 constmemok = 1;
3370 offmemok = 1;
3371 break;
3372
3373 case '&':
3374 /* Output operand that is stored before the need for the
3375 input operands (and their index registers) is over. */
3376 earlyclobber = 1, this_earlyclobber = 1;
3377 break;
3378
3379 case 'X':
3380 force_reload = 0;
3381 win = 1;
3382 break;
3383
3384 case 'g':
3385 if (! force_reload
3386 /* A PLUS is never a valid operand, but reload can make
3387 it from a register when eliminating registers. */
3388 && GET_CODE (operand) != PLUS
3389 /* A SCRATCH is not a valid operand. */
3390 && GET_CODE (operand) != SCRATCH
3391 && (! CONSTANT_P (operand)
3392 || ! flag_pic
3393 || LEGITIMATE_PIC_OPERAND_P (operand))
3394 && (GENERAL_REGS == ALL_REGS
3395 || !REG_P (operand)
3396 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3397 && reg_renumber[REGNO (operand)] < 0)))
3398 win = 1;
3399 cl = GENERAL_REGS;
3400 goto reg;
3401
3402 default:
3403 cn = lookup_constraint (p);
3404 switch (get_constraint_type (cn))
3405 {
3406 case CT_REGISTER:
3407 cl = reg_class_for_constraint (cn);
3408 if (cl != NO_REGS)
3409 goto reg;
3410 break;
3411
3412 case CT_CONST_INT:
3413 if (CONST_INT_P (operand)
3414 && (insn_const_int_ok_for_constraint
3415 (INTVAL (operand), cn)))
3416 win = true;
3417 break;
3418
3419 case CT_MEMORY:
3420 if (force_reload)
3421 break;
3422 if (constraint_satisfied_p (operand, cn))
3423 win = 1;
3424 /* If the address was already reloaded,
3425 we win as well. */
3426 else if (MEM_P (operand) && address_reloaded[i] == 1)
3427 win = 1;
3428 /* Likewise if the address will be reloaded because
3429 reg_equiv_address is nonzero. For reg_equiv_mem
3430 we have to check. */
3431 else if (REG_P (operand)
3432 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3433 && reg_renumber[REGNO (operand)] < 0
3434 && ((reg_equiv_mem (REGNO (operand)) != 0
3435 && (constraint_satisfied_p
3436 (reg_equiv_mem (REGNO (operand)),
3437 cn)))
3438 || (reg_equiv_address (REGNO (operand))
3439 != 0)))
3440 win = 1;
3441
3442 /* If we didn't already win, we can reload
3443 constants via force_const_mem, and other
3444 MEMs by reloading the address like for 'o'. */
3445 if (CONST_POOL_OK_P (operand_mode[i], operand)
3446 || MEM_P (operand))
3447 badop = 0;
3448 constmemok = 1;
3449 offmemok = 1;
3450 break;
3451
3452 case CT_SPECIAL_MEMORY:
3453 if (force_reload)
3454 break;
3455 if (constraint_satisfied_p (operand, cn))
3456 win = 1;
3457 /* Likewise if the address will be reloaded because
3458 reg_equiv_address is nonzero. For reg_equiv_mem
3459 we have to check. */
3460 else if (REG_P (operand)
3461 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3462 && reg_renumber[REGNO (operand)] < 0
3463 && reg_equiv_mem (REGNO (operand)) != 0
3464 && (constraint_satisfied_p
3465 (reg_equiv_mem (REGNO (operand)), cn)))
3466 win = 1;
3467 break;
3468
3469 case CT_ADDRESS:
3470 if (constraint_satisfied_p (operand, cn))
3471 win = 1;
3472
3473 /* If we didn't already win, we can reload
3474 the address into a base register. */
3475 this_alternative[i]
3476 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3477 ADDRESS, SCRATCH);
3478 badop = 0;
3479 break;
3480
3481 case CT_FIXED_FORM:
3482 if (constraint_satisfied_p (operand, cn))
3483 win = 1;
3484 break;
3485 }
3486 break;
3487
3488 reg:
3489 this_alternative[i]
3490 = reg_class_subunion[this_alternative[i]][cl];
3491 if (GET_MODE (operand) == BLKmode)
3492 break;
3493 winreg = 1;
3494 if (REG_P (operand)
3495 && reg_fits_class_p (operand, this_alternative[i],
3496 offset, GET_MODE (recog_data.operand[i])))
3497 win = 1;
3498 break;
3499 }
3500 while ((p += len), c);
3501
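/* Advance CONSTRAINTS[I] to the next alternative only on the last
pass over this alternative: the swapped pass when the insn has a
commutative pair, the only (unswapped) pass otherwise. */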
3502 if (swapped == (commutative >= 0 ? 1 : 0))
3503 constraints[i] = p;
3504
3505 /* If this operand could be handled with a reg,
3506 and some reg is allowed, then this operand can be handled. */
3507 if (winreg && this_alternative[i] != NO_REGS
3508 && (win || !class_only_fixed_regs[this_alternative[i]]))
3509 badop = 0;
3510
3511 /* Record which operands fit this alternative. */
3512 this_alternative_earlyclobber[i] = earlyclobber;
3513 if (win && ! force_reload)
3514 this_alternative_win[i] = 1;
3515 else if (did_match && ! force_reload)
3516 this_alternative_match_win[i] = 1;
3517 else
3518 {
3519 int const_to_mem = 0;
3520
3521 this_alternative_offmemok[i] = offmemok;
3522 losers++;
3523 if (badop)
3524 bad = 1;
3525 /* Alternative loses if it has no regs for a reg operand. */
3526 if (REG_P (operand)
3527 && this_alternative[i] == NO_REGS
3528 && this_alternative_matches[i] < 0)
3529 bad = 1;
3530
3531 /* If this is a constant that is reloaded into the desired
3532 class by copying it to memory first, count that as another
3533 reload. This is consistent with other code and is
3534 required to avoid choosing another alternative when
3535 the constant is moved into memory by this function on
3536 an early reload pass. Note that the test here is
3537 precisely the same as in the code below that calls
3538 force_const_mem. */
3539 if (CONST_POOL_OK_P (operand_mode[i], operand)
3540 && ((targetm.preferred_reload_class (operand,
3541 this_alternative[i])
3542 == NO_REGS)
3543 || no_input_reloads))
3544 {
3545 const_to_mem = 1;
3546 if (this_alternative[i] != NO_REGS)
3547 losers++;
3548 }
3549
3550 /* Alternative loses if it requires a type of reload not
3551 permitted for this insn. We can always reload SCRATCH
3552 and objects with a REG_UNUSED note. */
3553 if (GET_CODE (operand) != SCRATCH
3554 && modified[i] != RELOAD_READ && no_output_reloads
3555 && ! find_reg_note (insn, REG_UNUSED, operand))
3556 bad = 1;
3557 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3558 && ! const_to_mem)
3559 bad = 1;
3560
3561 /* If we can't reload this value at all, reject this
3562 alternative. Note that we could also lose due to
3563 LIMIT_RELOAD_CLASS, but we don't check that
3564 here. */
3565
3566 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3567 {
3568 if (targetm.preferred_reload_class (operand,
3569 this_alternative[i])
3570 == NO_REGS)
3571 reject = 600;
3572
3573 if (operand_type[i] == RELOAD_FOR_OUTPUT
3574 && (targetm.preferred_output_reload_class (operand,
3575 this_alternative[i])
3576 == NO_REGS))
3577 reject = 600;
3578 }
3579
3580 /* We prefer to reload pseudos over reloading other things,
3581 since such reloads may be able to be eliminated later.
3582 If we are reloading a SCRATCH, we won't be generating any
3583 insns, just using a register, so it is also preferred.
3584 So bump REJECT in other cases. Don't do this in the
3585 case where we are forcing a constant into memory and
3586 it will then win, since we don't want a different
3587 alternative to match in that case. */
3588 if (! (REG_P (operand)
3589 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3590 && GET_CODE (operand) != SCRATCH
3591 && ! (const_to_mem && constmemok))
3592 reject += 2;
3593
3594 /* Input reloads can be inherited more often than output
3595 reloads can be removed, so penalize output reloads. */
3596 if (operand_type[i] != RELOAD_FOR_INPUT
3597 && GET_CODE (operand) != SCRATCH)
3598 reject++;
3599 }
3600
3601 /* If this operand is a pseudo register that didn't get
3602 a hard reg and this alternative accepts some
3603 register, see if the class that we want is a subset
3604 of the preferred class for this register. If not,
3605 but it intersects that class, use the preferred class
3606 instead. If it does not intersect the preferred
3607 class, show that usage of this alternative should be
3608 discouraged; it will be discouraged more still if the
3609 register is `preferred or nothing'. We do this
3610 because it increases the chance of reusing our spill
3611 register in a later insn and avoiding a pair of
3612 memory stores and loads.
3613
3614 Don't bother with this if this alternative will
3615 accept this operand.
3616
3617 Don't do this for a multiword operand, since it is
3618 only a small win and has the risk of requiring more
3619 spill registers, which could cause a large loss.
3620
3621 Don't do this if the preferred class has only one
3622 register because we might otherwise exhaust the
3623 class. */
3624
3625 if (! win && ! did_match
3626 && this_alternative[i] != NO_REGS
3627 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3628 && reg_class_size [(int) preferred_class[i]] > 0
3629 && ! small_register_class_p (preferred_class[i]))
3630 {
3631 if (! reg_class_subset_p (this_alternative[i],
3632 preferred_class[i]))
3633 {
3634 /* Since we don't have a way of forming the intersection,
3635 we just do something special if the preferred class
3636 is a subset of the class we have; that's the most
3637 common case anyway. */
3638 if (reg_class_subset_p (preferred_class[i],
3639 this_alternative[i]))
3640 this_alternative[i] = preferred_class[i];
3641 else
3642 reject += (2 + 2 * pref_or_nothing[i]);
3643 }
3644 }
3645 }
3646
3647 /* Now see if any output operands that are marked "earlyclobber"
3648 in this alternative conflict with any input operands
3649 or any memory addresses. */
3650
3651 for (i = 0; i < noperands; i++)
3652 if (this_alternative_earlyclobber[i]
3653 && (this_alternative_win[i] || this_alternative_match_win[i]))
3654 {
3655 struct decomposition early_data;
3656
3657 early_data = decompose (recog_data.operand[i]);
3658
3659 gcc_assert (modified[i] != RELOAD_READ);
3660
3661 if (this_alternative[i] == NO_REGS)
3662 {
3663 this_alternative_earlyclobber[i] = 0;
3664 gcc_assert (this_insn_is_asm);
3665 error_for_asm (this_insn,
3666 "%<&%> constraint used with no register class");
3667 }
3668
3669 for (j = 0; j < noperands; j++)
3670 /* Is this an input operand or a memory ref? */
3671 if ((MEM_P (recog_data.operand[j])
3672 || modified[j] != RELOAD_WRITE)
3673 && j != i
3674 /* Ignore things like match_operator operands. */
3675 && !recog_data.is_operator[j]
3676 /* Don't count an input operand that is constrained to match
3677 the early clobber operand. */
3678 && ! (this_alternative_matches[j] == i
3679 && rtx_equal_p (recog_data.operand[i],
3680 recog_data.operand[j]))
3681 /* Is it altered by storing the earlyclobber operand? */
3682 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3683 early_data))
3684 {
3685 /* If the output is in a non-empty few-regs class,
3686 it's costly to reload it, so reload the input instead. */
3687 if (small_register_class_p (this_alternative[i])
3688 && (REG_P (recog_data.operand[j])
3689 || GET_CODE (recog_data.operand[j]) == SUBREG))
3690 {
3691 losers++;
3692 this_alternative_win[j] = 0;
3693 this_alternative_match_win[j] = 0;
3694 }
3695 else
3696 break;
3697 }
3698 /* If an earlyclobber operand conflicts with something,
3699 it must be reloaded, so request this and count the cost. */
3700 if (j != noperands)
3701 {
3702 losers++;
3703 this_alternative_win[i] = 0;
3704 this_alternative_match_win[j] = 0;
3705 for (j = 0; j < noperands; j++)
3706 if (this_alternative_matches[j] == i
3707 && this_alternative_match_win[j])
3708 {
3709 this_alternative_win[j] = 0;
3710 this_alternative_match_win[j] = 0;
3711 losers++;
3712 }
3713 }
3714 }
3715
3716 /* If one alternative accepts all the operands with no reload
3717 required, choose that alternative; don't consider the remaining ones. */
3718 if (losers == 0)
3719 {
3720 /* Unswap these so that they are never swapped at `finish'. */
3721 if (swapped)
3722 {
3723 recog_data.operand[commutative] = substed_operand[commutative];
3724 recog_data.operand[commutative + 1]
3725 = substed_operand[commutative + 1];
3726 }
3727 for (i = 0; i < noperands; i++)
3728 {
3729 goal_alternative_win[i] = this_alternative_win[i];
3730 goal_alternative_match_win[i] = this_alternative_match_win[i];
3731 goal_alternative[i] = this_alternative[i];
3732 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3733 goal_alternative_matches[i] = this_alternative_matches[i];
3734 goal_alternative_earlyclobber[i]
3735 = this_alternative_earlyclobber[i];
3736 }
3737 goal_alternative_number = this_alternative_number;
3738 goal_alternative_swapped = swapped;
3739 goal_earlyclobber = this_earlyclobber;
3740 goto finish;
3741 }
3742
3743 /* REJECT, set by the ! and ? constraint characters and when a register
3744 would be reloaded into a non-preferred class, discourages the use of
3745 this alternative for a reload goal. REJECT is incremented by six
3746 for each ? and two for each non-preferred class. */
3747 losers = losers * 6 + reject;
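/* For example, an alternative needing one reload plus a single ?
(1 * 6 + 6 = 12) ties with an alternative needing two reloads and
no ? (2 * 6 + 0 = 12). */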
3748
3749 /* If this alternative can be made to work by reloading,
3750 and it needs less reloading than the others checked so far,
3751 record it as the chosen goal for reloading. */
3752 if (! bad)
3753 {
3754 if (best > losers)
3755 {
3756 for (i = 0; i < noperands; i++)
3757 {
3758 goal_alternative[i] = this_alternative[i];
3759 goal_alternative_win[i] = this_alternative_win[i];
3760 goal_alternative_match_win[i]
3761 = this_alternative_match_win[i];
3762 goal_alternative_offmemok[i]
3763 = this_alternative_offmemok[i];
3764 goal_alternative_matches[i] = this_alternative_matches[i];
3765 goal_alternative_earlyclobber[i]
3766 = this_alternative_earlyclobber[i];
3767 }
3768 goal_alternative_swapped = swapped;
3769 best = losers;
3770 goal_alternative_number = this_alternative_number;
3771 goal_earlyclobber = this_earlyclobber;
3772 }
3773 }
3774
3775 if (swapped)
3776 {
3777 /* If the commutative operands have been swapped, swap
3778 them back in order to check the next alternative. */
3779 recog_data.operand[commutative] = substed_operand[commutative];
3780 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3781 /* Unswap the duplicates too. */
3782 for (i = 0; i < recog_data.n_dups; i++)
3783 if (recog_data.dup_num[i] == commutative
3784 || recog_data.dup_num[i] == commutative + 1)
3785 *recog_data.dup_loc[i]
3786 = recog_data.operand[(int) recog_data.dup_num[i]];
3787
3788 /* Unswap the operand related information as well. */
3789 std::swap (preferred_class[commutative],
3790 preferred_class[commutative + 1]);
3791 std::swap (pref_or_nothing[commutative],
3792 pref_or_nothing[commutative + 1]);
3793 std::swap (address_reloaded[commutative],
3794 address_reloaded[commutative + 1]);
3795 }
3796 }
3797 }
3798
3799 /* The operands don't meet the constraints.
3800 goal_alternative describes the alternative
3801 that we could reach by reloading the fewest operands.
3802 Reload so as to fit it. */
3803
3804 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3805 {
3806 /* No alternative works with reloads?? */
3807 if (insn_code_number >= 0)
3808 fatal_insn ("unable to generate reloads for:", insn);
3809 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3810 /* Avoid further trouble with this insn. */
3811 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3812 n_reloads = 0;
3813 return 0;
3814 }
3815
3816 /* Jump to `finish' from above if all operands are valid already.
3817 In that case, goal_alternative_win is all 1. */
3818 finish:
3819
3820 /* Right now, for any pair of operands I and J that are required to match,
3821 with I < J,
3822 goal_alternative_matches[J] is I.
3823 Set up goal_alternative_matched as the inverse function:
3824 goal_alternative_matched[I] = J. */
3825
3826 for (i = 0; i < noperands; i++)
3827 goal_alternative_matched[i] = -1;
3828
3829 for (i = 0; i < noperands; i++)
3830 if (! goal_alternative_win[i]
3831 && goal_alternative_matches[i] >= 0)
3832 goal_alternative_matched[goal_alternative_matches[i]] = i;
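/* For example, if operand 2 was constrained to match operand 0 and
still needs a reload, goal_alternative_matches[2] is 0 and we have
just recorded goal_alternative_matched[0] = 2. */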
3833
3834 for (i = 0; i < noperands; i++)
3835 goal_alternative_win[i] |= goal_alternative_match_win[i];
3836
3837 /* If the best alternative is with operands 1 and 2 swapped,
3838 consider them swapped before reporting the reloads. Update the
3839 operand numbers of any reloads already pushed. */
3840
3841 if (goal_alternative_swapped)
3842 {
3843 std::swap (substed_operand[commutative],
3844 substed_operand[commutative + 1]);
3845 std::swap (recog_data.operand[commutative],
3846 recog_data.operand[commutative + 1]);
3847 std::swap (*recog_data.operand_loc[commutative],
3848 *recog_data.operand_loc[commutative + 1]);
3849
3850 for (i = 0; i < recog_data.n_dups; i++)
3851 if (recog_data.dup_num[i] == commutative
3852 || recog_data.dup_num[i] == commutative + 1)
3853 *recog_data.dup_loc[i]
3854 = recog_data.operand[(int) recog_data.dup_num[i]];
3855
3856 for (i = 0; i < n_reloads; i++)
3857 {
3858 if (rld[i].opnum == commutative)
3859 rld[i].opnum = commutative + 1;
3860 else if (rld[i].opnum == commutative + 1)
3861 rld[i].opnum = commutative;
3862 }
3863 }
3864
3865 for (i = 0; i < noperands; i++)
3866 {
3867 operand_reloadnum[i] = -1;
3868
3869 /* If this is an earlyclobber operand, we need to widen the scope.
3870 The reload must remain valid from the start of the insn being
3871 reloaded until after the operand is stored into its destination.
3872 We approximate this with RELOAD_OTHER even though we know that we
3873 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3874
3875 One special case that is worth checking is when we have an
3876 output that is earlyclobber but isn't used past the insn (typically
3877 a SCRATCH). In this case, we only need to have the reload live
3878 through the insn itself, but not for any of our input or output
3879 reloads.
3880 But we must not accidentally narrow the scope of an existing
3881 RELOAD_OTHER reload - leave these alone.
3882
3883 In any case, anything needed to address this operand can remain
3884 however they were previously categorized. */
3885
3886 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3887 operand_type[i]
3888 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3889 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3890 }
3891
3892 /* Any constants that aren't allowed and can't be reloaded
3893 into registers are here changed into memory references. */
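/* For example, a floating-point constant that the target will not
reload into the chosen register class is placed in the constant pool
by force_const_mem below, and the operand becomes a MEM referring to
that pool entry. */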
3894 for (i = 0; i < noperands; i++)
3895 if (! goal_alternative_win[i])
3896 {
3897 rtx op = recog_data.operand[i];
3898 rtx subreg = NULL_RTX;
3899 rtx plus = NULL_RTX;
3900 machine_mode mode = operand_mode[i];
3901
3902 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3903 push_reload so we have to let them pass here. */
3904 if (GET_CODE (op) == SUBREG)
3905 {
3906 subreg = op;
3907 op = SUBREG_REG (op);
3908 mode = GET_MODE (op);
3909 }
3910
3911 if (GET_CODE (op) == PLUS)
3912 {
3913 plus = op;
3914 op = XEXP (op, 1);
3915 }
3916
3917 if (CONST_POOL_OK_P (mode, op)
3918 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3919 == NO_REGS)
3920 || no_input_reloads))
3921 {
3922 int this_address_reloaded;
3923 rtx tem = force_const_mem (mode, op);
3924
3925 /* If we stripped a SUBREG or a PLUS above, add it back. */
3926 if (plus != NULL_RTX)
3927 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3928
3929 if (subreg != NULL_RTX)
3930 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3931
3932 this_address_reloaded = 0;
3933 substed_operand[i] = recog_data.operand[i]
3934 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3935 0, insn, &this_address_reloaded);
3936
3937 /* If the alternative accepts constant pool refs directly,
3938 there will be no reload needed at all. */
3939 if (plus == NULL_RTX
3940 && subreg == NULL_RTX
3941 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3942 ? substed_operand[i]
3943 : NULL,
3944 recog_data.constraints[i],
3945 goal_alternative_number))
3946 goal_alternative_win[i] = 1;
3947 }
3948 }
3949
3950 /* Record the values of the earlyclobber operands for the caller. */
3951 if (goal_earlyclobber)
3952 for (i = 0; i < noperands; i++)
3953 if (goal_alternative_earlyclobber[i])
3954 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3955
3956 /* Now record reloads for all the operands that need them. */
3957 for (i = 0; i < noperands; i++)
3958 if (! goal_alternative_win[i])
3959 {
3960 /* Operands that match previous ones have already been handled. */
3961 if (goal_alternative_matches[i] >= 0)
3962 ;
3963 /* Handle an operand with a nonoffsettable address
3964 appearing where an offsettable address will do
3965 by reloading the address into a base register.
3966
3967 ??? We can also do this when the operand is a register and
3968 reg_equiv_mem is not offsettable, but this is a bit tricky,
3969 so we don't bother with it. It may not be worth doing. */
3970 else if (goal_alternative_matched[i] == -1
3971 && goal_alternative_offmemok[i]
3972 && MEM_P (recog_data.operand[i]))
3973 {
3974 /* If the address to be reloaded is a VOIDmode constant,
3975 use the default address mode as mode of the reload register,
3976 as would have been done by find_reloads_address. */
3977 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3978 machine_mode address_mode;
3979
3980 address_mode = get_address_mode (recog_data.operand[i]);
3981 operand_reloadnum[i]
3982 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3983 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3984 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3985 address_mode,
3986 VOIDmode, 0, 0, i, RELOAD_OTHER);
3987 rld[operand_reloadnum[i]].inc
3988 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3989
3990 /* If this operand is an output, we will have made any
3991 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3992 now we are treating part of the operand as an input, so
3993 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
3994
3995 if (modified[i] == RELOAD_WRITE)
3996 {
3997 for (j = 0; j < n_reloads; j++)
3998 {
3999 if (rld[j].opnum == i)
4000 {
4001 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4002 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4003 else if (rld[j].when_needed
4004 == RELOAD_FOR_OUTADDR_ADDRESS)
4005 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4006 }
4007 }
4008 }
4009 }
4010 else if (goal_alternative_matched[i] == -1)
4011 {
4012 operand_reloadnum[i]
4013 = push_reload ((modified[i] != RELOAD_WRITE
4014 ? recog_data.operand[i] : 0),
4015 (modified[i] != RELOAD_READ
4016 ? recog_data.operand[i] : 0),
4017 (modified[i] != RELOAD_WRITE
4018 ? recog_data.operand_loc[i] : 0),
4019 (modified[i] != RELOAD_READ
4020 ? recog_data.operand_loc[i] : 0),
4021 (enum reg_class) goal_alternative[i],
4022 (modified[i] == RELOAD_WRITE
4023 ? VOIDmode : operand_mode[i]),
4024 (modified[i] == RELOAD_READ
4025 ? VOIDmode : operand_mode[i]),
4026 (insn_code_number < 0 ? 0
4027 : insn_data[insn_code_number].operand[i].strict_low),
4028 0, i, operand_type[i]);
4029 }
4030 /* In a matching pair of operands, one must be input only
4031 and the other must be output only.
4032 Pass the input operand as IN and the other as OUT. */
4033 else if (modified[i] == RELOAD_READ
4034 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4035 {
4036 operand_reloadnum[i]
4037 = push_reload (recog_data.operand[i],
4038 recog_data.operand[goal_alternative_matched[i]],
4039 recog_data.operand_loc[i],
4040 recog_data.operand_loc[goal_alternative_matched[i]],
4041 (enum reg_class) goal_alternative[i],
4042 operand_mode[i],
4043 operand_mode[goal_alternative_matched[i]],
4044 0, 0, i, RELOAD_OTHER);
4045 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4046 }
4047 else if (modified[i] == RELOAD_WRITE
4048 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4049 {
4050 operand_reloadnum[goal_alternative_matched[i]]
4051 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4052 recog_data.operand[i],
4053 recog_data.operand_loc[goal_alternative_matched[i]],
4054 recog_data.operand_loc[i],
4055 (enum reg_class) goal_alternative[i],
4056 operand_mode[goal_alternative_matched[i]],
4057 operand_mode[i],
4058 0, 0, i, RELOAD_OTHER);
4059 operand_reloadnum[i] = output_reloadnum;
4060 }
4061 else
4062 {
4063 gcc_assert (insn_code_number < 0);
4064 error_for_asm (insn, "inconsistent operand constraints "
4065 "in an %<asm%>");
4066 /* Avoid further trouble with this insn. */
4067 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4068 n_reloads = 0;
4069 return 0;
4070 }
4071 }
4072 else if (goal_alternative_matched[i] < 0
4073 && goal_alternative_matches[i] < 0
4074 && address_operand_reloaded[i] != 1
4075 && optimize)
4076 {
4077 /* For each non-matching operand that's a MEM or a pseudo-register
4078 that didn't get a hard register, make an optional reload.
4079 This may get done even if the insn needs no reloads otherwise. */
4080
4081 rtx operand = recog_data.operand[i];
4082
4083 while (GET_CODE (operand) == SUBREG)
4084 operand = SUBREG_REG (operand);
4085 if ((MEM_P (operand)
4086 || (REG_P (operand)
4087 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4088 /* If this is only for an output, the optional reload would not
4089 actually cause us to use a register now, just note that
4090 something is stored here. */
4091 && (goal_alternative[i] != NO_REGS
4092 || modified[i] == RELOAD_WRITE)
4093 && ! no_input_reloads
4094 /* An optional output reload might allow us to delete INSN later.
4095 We mustn't make in-out reloads on insns that are not permitted
4096 to have output reloads.
4097 If this is an asm, we can't delete it; we must not even call
4098 push_reload for an optional output reload in this case,
4099 because we can't be sure that the constraint allows a register,
4100 and push_reload verifies the constraints for asms. */
4101 && (modified[i] == RELOAD_READ
4102 || (! no_output_reloads && ! this_insn_is_asm)))
4103 operand_reloadnum[i]
4104 = push_reload ((modified[i] != RELOAD_WRITE
4105 ? recog_data.operand[i] : 0),
4106 (modified[i] != RELOAD_READ
4107 ? recog_data.operand[i] : 0),
4108 (modified[i] != RELOAD_WRITE
4109 ? recog_data.operand_loc[i] : 0),
4110 (modified[i] != RELOAD_READ
4111 ? recog_data.operand_loc[i] : 0),
4112 (enum reg_class) goal_alternative[i],
4113 (modified[i] == RELOAD_WRITE
4114 ? VOIDmode : operand_mode[i]),
4115 (modified[i] == RELOAD_READ
4116 ? VOIDmode : operand_mode[i]),
4117 (insn_code_number < 0 ? 0
4118 : insn_data[insn_code_number].operand[i].strict_low),
4119 1, i, operand_type[i]);
4120 /* If a memory reference remains (either as a MEM or a pseudo that
4121 did not get a hard register), yet we can't make an optional
4122 reload, check if this is actually a pseudo register reference;
4123 we then need to emit a USE and/or a CLOBBER so that reload
4124 inheritance will do the right thing. */
4125 else if (replace
4126 && (MEM_P (operand)
4127 || (REG_P (operand)
4128 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4129 && reg_renumber [REGNO (operand)] < 0)))
4130 {
4131 operand = *recog_data.operand_loc[i];
4132
4133 while (GET_CODE (operand) == SUBREG)
4134 operand = SUBREG_REG (operand);
4135 if (REG_P (operand))
4136 {
4137 if (modified[i] != RELOAD_WRITE)
4138 /* We mark the USE with QImode so that we recognize
4139 it as one that can be safely deleted at the end
4140 of reload. */
4141 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4142 insn), QImode);
4143 if (modified[i] != RELOAD_READ)
4144 emit_insn_after (gen_clobber (operand), insn);
4145 }
4146 }
4147 }
4148 else if (goal_alternative_matches[i] >= 0
4149 && goal_alternative_win[goal_alternative_matches[i]]
4150 && modified[i] == RELOAD_READ
4151 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4152 && ! no_input_reloads && ! no_output_reloads
4153 && optimize)
4154 {
4155 /* Similarly, make an optional reload for a pair of matching
4156 objects that are in MEM or a pseudo that didn't get a hard reg. */
4157
4158 rtx operand = recog_data.operand[i];
4159
4160 while (GET_CODE (operand) == SUBREG)
4161 operand = SUBREG_REG (operand);
4162 if ((MEM_P (operand)
4163 || (REG_P (operand)
4164 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4165 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4166 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4167 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4168 recog_data.operand[i],
4169 recog_data.operand_loc[goal_alternative_matches[i]],
4170 recog_data.operand_loc[i],
4171 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4172 operand_mode[goal_alternative_matches[i]],
4173 operand_mode[i],
4174 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4175 }
4176
4177 /* Perform whatever substitutions on the operands we are supposed
4178 to make due to commutativity or replacement of registers
4179 with equivalent constants or memory slots. */
4180
4181 for (i = 0; i < noperands; i++)
4182 {
4183 /* We only do this on the last pass through reload, because it is
4184 possible for some data (like reg_equiv_address) to be changed during
4185 later passes. Moreover, we lose the opportunity to get a useful
4186 reload_{in,out}_reg when we do these replacements. */
4187
4188 if (replace)
4189 {
4190 rtx substitution = substed_operand[i];
4191
4192 *recog_data.operand_loc[i] = substitution;
4193
4194 /* If we're replacing an operand with a LABEL_REF, we need to
4195 make sure that there's a REG_LABEL_OPERAND note attached to
4196 this instruction. */
4197 if (GET_CODE (substitution) == LABEL_REF
4198 && !find_reg_note (insn, REG_LABEL_OPERAND,
4199 label_ref_label (substitution))
4200 /* For a JUMP_P, if it was a branch target it must have
4201 already been recorded as such. */
4202 && (!JUMP_P (insn)
4203 || !label_is_jump_target_p (label_ref_label (substitution),
4204 insn)))
4205 {
4206 add_reg_note (insn, REG_LABEL_OPERAND,
4207 label_ref_label (substitution));
4208 if (LABEL_P (label_ref_label (substitution)))
4209 ++LABEL_NUSES (label_ref_label (substitution));
4210 }
4211
4212 }
4213 else
4214 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4215 }
4216
4217 /* If this insn pattern contains any MATCH_DUP's, make sure that
4218 they will be substituted if the operands they match are substituted.
4219 Also do now any substitutions we already did on the operands.
4220
4221 Don't do this if we aren't making replacements because we might be
4222 propagating things allocated by frame pointer elimination into places
4223 it doesn't expect. */
4224
4225 if (insn_code_number >= 0 && replace)
4226 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4227 {
4228 int opno = recog_data.dup_num[i];
4229 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4230 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4231 }
4232
4233 #if 0
4234 /* This loses because reloading of prior insns can invalidate the equivalence
4235 (or at least find_equiv_reg isn't smart enough to find it any more),
4236 causing this insn to need more reload regs than it needed before.
4237 It may be too late to make the reload regs available.
4238 Now this optimization is done safely in choose_reload_regs. */
4239
4240 /* For each reload of a reg into some other class of reg,
4241 search for an existing equivalent reg (same value now) in the right class.
4242 We can use it as long as we don't need to change its contents. */
4243 for (i = 0; i < n_reloads; i++)
4244 if (rld[i].reg_rtx == 0
4245 && rld[i].in != 0
4246 && REG_P (rld[i].in)
4247 && rld[i].out == 0)
4248 {
4249 rld[i].reg_rtx
4250 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4251 static_reload_reg_p, 0, rld[i].inmode);
4252 /* Prevent generation of insn to load the value
4253 because the one we found already has the value. */
4254 if (rld[i].reg_rtx)
4255 rld[i].in = rld[i].reg_rtx;
4256 }
4257 #endif
4258
4259 /* If we detected an error and replaced the asm instruction with a USE,
4260 forget about the reloads. */
4261 if (GET_CODE (PATTERN (insn)) == USE
4262 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4263 n_reloads = 0;
4264
4265 /* Perhaps an output reload can be combined with another
4266 to reduce needs by one. */
4267 if (!goal_earlyclobber)
4268 combine_reloads ();
4269
4270 /* If we have a pair of reloads for parts of an address, they are reloading
4271 the same object, the operands themselves were not reloaded, and they
4272 are for two operands that are supposed to match, merge the reloads and
4273 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4274
4275 for (i = 0; i < n_reloads; i++)
4276 {
4277 int k;
4278
4279 for (j = i + 1; j < n_reloads; j++)
4280 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4281 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4282 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4283 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4284 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4285 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4286 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4287 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4288 && rtx_equal_p (rld[i].in, rld[j].in)
4289 && (operand_reloadnum[rld[i].opnum] < 0
4290 || rld[operand_reloadnum[rld[i].opnum]].optional)
4291 && (operand_reloadnum[rld[j].opnum] < 0
4292 || rld[operand_reloadnum[rld[j].opnum]].optional)
4293 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4294 || (goal_alternative_matches[rld[j].opnum]
4295 == rld[i].opnum)))
4296 {
4297 for (k = 0; k < n_replacements; k++)
4298 if (replacements[k].what == j)
4299 replacements[k].what = i;
4300
4301 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4302 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4303 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4304 else
4305 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4306 rld[j].in = 0;
4307 }
4308 }
4309
4310 /* Scan all the reloads and update their type.
4311 If a reload is for the address of an operand and we didn't reload
4312 that operand, change the type. Similarly, change the operand number
4313 of a reload when two operands match. If a reload is optional, treat it
4314 as though the operand isn't reloaded.
4315
4316 ??? This latter case is somewhat odd because if we do the optional
4317 reload, it means the object is hanging around. Thus we need only
4318 do the address reload if the optional reload was NOT done.
4319
4320 Change secondary reloads to be the address type of their operand, not
4321 the normal type.
4322
4323 If an operand's reload is now RELOAD_OTHER, change any
4324 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4325 RELOAD_FOR_OTHER_ADDRESS. */
4326
4327 for (i = 0; i < n_reloads; i++)
4328 {
4329 if (rld[i].secondary_p
4330 && rld[i].when_needed == operand_type[rld[i].opnum])
4331 rld[i].when_needed = address_type[rld[i].opnum];
4332
4333 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4334 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4335 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4336 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4337 && (operand_reloadnum[rld[i].opnum] < 0
4338 || rld[operand_reloadnum[rld[i].opnum]].optional))
4339 {
4340 /* If we have a secondary reload to go along with this reload,
4341 change its type to RELOAD_FOR_OPADDR_ADDR. */
4342
4343 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4344 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4345 && rld[i].secondary_in_reload != -1)
4346 {
4347 int secondary_in_reload = rld[i].secondary_in_reload;
4348
4349 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4350
4351 /* If there's a tertiary reload we have to change it also. */
4352 if (secondary_in_reload > 0
4353 && rld[secondary_in_reload].secondary_in_reload != -1)
4354 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4355 = RELOAD_FOR_OPADDR_ADDR;
4356 }
4357
4358 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4359 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4360 && rld[i].secondary_out_reload != -1)
4361 {
4362 int secondary_out_reload = rld[i].secondary_out_reload;
4363
4364 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4365
4366 /* If there's a tertiary reload we have to change it also. */
4367 if (secondary_out_reload
4368 && rld[secondary_out_reload].secondary_out_reload != -1)
4369 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4370 = RELOAD_FOR_OPADDR_ADDR;
4371 }
4372
4373 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4374 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4375 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4376 else
4377 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4378 }
4379
4380 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4381 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4382 && operand_reloadnum[rld[i].opnum] >= 0
4383 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4384 == RELOAD_OTHER))
4385 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4386
4387 if (goal_alternative_matches[rld[i].opnum] >= 0)
4388 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4389 }
4390
4391 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4392 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4393 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4394
4395 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4396 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4397 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4398 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4399 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4400 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4401 This is complicated by the fact that a single operand can have more
4402 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4403 choose_reload_regs without affecting code quality, and cases that
4404 actually fail are extremely rare, so it turns out to be better to fix
4405 the problem here by not generating cases that choose_reload_regs will
4406 fail for. */
4407 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4408 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4409 a single operand.
4410 We can reduce the register pressure by exploiting the fact that a
4411 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4412 does not conflict with any of them, if it is only used for the first of
4413 the RELOAD_FOR_X_ADDRESS reloads. */
4414 {
4415 int first_op_addr_num = -2;
4416 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4417 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4418 int need_change = 0;
4419 /* We use first_op_addr_num and the contents of the above arrays
4420 first as flags: -2 means no instance encountered, -1 means exactly
4421 one instance encountered.
4422 If more than one instance has been encountered, we store the reload
4423 number of the first reload of the kind in question; reload numbers
4424 are known to be non-negative. */
4425 for (i = 0; i < noperands; i++)
4426 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4427 for (i = n_reloads - 1; i >= 0; i--)
4428 {
4429 switch (rld[i].when_needed)
4430 {
4431 case RELOAD_FOR_OPERAND_ADDRESS:
4432 if (++first_op_addr_num >= 0)
4433 {
4434 first_op_addr_num = i;
4435 need_change = 1;
4436 }
4437 break;
4438 case RELOAD_FOR_INPUT_ADDRESS:
4439 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4440 {
4441 first_inpaddr_num[rld[i].opnum] = i;
4442 need_change = 1;
4443 }
4444 break;
4445 case RELOAD_FOR_OUTPUT_ADDRESS:
4446 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4447 {
4448 first_outpaddr_num[rld[i].opnum] = i;
4449 need_change = 1;
4450 }
4451 break;
4452 default:
4453 break;
4454 }
4455 }
4456
4457 if (need_change)
4458 {
4459 for (i = 0; i < n_reloads; i++)
4460 {
4461 int first_num;
4462 enum reload_type type;
4463
4464 switch (rld[i].when_needed)
4465 {
4466 case RELOAD_FOR_OPADDR_ADDR:
4467 first_num = first_op_addr_num;
4468 type = RELOAD_FOR_OPERAND_ADDRESS;
4469 break;
4470 case RELOAD_FOR_INPADDR_ADDRESS:
4471 first_num = first_inpaddr_num[rld[i].opnum];
4472 type = RELOAD_FOR_INPUT_ADDRESS;
4473 break;
4474 case RELOAD_FOR_OUTADDR_ADDRESS:
4475 first_num = first_outpaddr_num[rld[i].opnum];
4476 type = RELOAD_FOR_OUTPUT_ADDRESS;
4477 break;
4478 default:
4479 continue;
4480 }
4481 if (first_num < 0)
4482 continue;
4483 else if (i > first_num)
4484 rld[i].when_needed = type;
4485 else
4486 {
4487 /* Check if the only TYPE reload that uses reload I is
4488 reload FIRST_NUM. */
4489 for (j = n_reloads - 1; j > first_num; j--)
4490 {
4491 if (rld[j].when_needed == type
4492 && (rld[i].secondary_p
4493 ? rld[j].secondary_in_reload == i
4494 : reg_mentioned_p (rld[i].in, rld[j].in)))
4495 {
4496 rld[i].when_needed = type;
4497 break;
4498 }
4499 }
4500 }
4501 }
4502 }
4503 }
4504
4505 /* See if we have any reloads that are now allowed to be merged
4506 because we've changed when the reload is needed to
4507 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4508 check for the most common cases. */
4509
4510 for (i = 0; i < n_reloads; i++)
4511 if (rld[i].in != 0 && rld[i].out == 0
4512 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4513 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4514 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4515 for (j = 0; j < n_reloads; j++)
4516 if (i != j && rld[j].in != 0 && rld[j].out == 0
4517 && rld[j].when_needed == rld[i].when_needed
4518 && MATCHES (rld[i].in, rld[j].in)
4519 && rld[i].rclass == rld[j].rclass
4520 && !rld[i].nocombine && !rld[j].nocombine
4521 && rld[i].reg_rtx == rld[j].reg_rtx)
4522 {
4523 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4524 transfer_replacements (i, j);
4525 rld[j].in = 0;
4526 }
4527
4528 /* If we made any reloads for addresses, see if they violate a
4529 "no input reloads" requirement for this insn. But loads that we
4530 do after the insn (such as for output addresses) are fine. */
4531 if (HAVE_cc0 && no_input_reloads)
4532 for (i = 0; i < n_reloads; i++)
4533 gcc_assert (rld[i].in == 0
4534 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4535 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4536
4537 /* Compute reload_mode and reload_nregs. */
4538 for (i = 0; i < n_reloads; i++)
4539 {
4540 rld[i].mode = rld[i].inmode;
4541 if (rld[i].mode == VOIDmode
4542 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4543 rld[i].mode = rld[i].outmode;
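/* That is, RLD[I].mode ends up as the wider of the input and output
modes, so the reload register can hold both values. */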
4544
4545 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4546 }
4547
4548 /* Special case a simple move with an input reload and a
4549 destination of a hard reg: if the hard reg is ok, use it. */
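/* For example, in (set (reg:SI 3) (reg:SI 999)) where pseudo 999 needs
an input reload and hard reg 3 is in the required reload class, the
reload can use hard reg 3 itself as its reload register. (The register
numbers here are only illustrative.) */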
4550 for (i = 0; i < n_reloads; i++)
4551 if (rld[i].when_needed == RELOAD_FOR_INPUT
4552 && GET_CODE (PATTERN (insn)) == SET
4553 && REG_P (SET_DEST (PATTERN (insn)))
4554 && (SET_SRC (PATTERN (insn)) == rld[i].in
4555 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4556 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4557 {
4558 rtx dest = SET_DEST (PATTERN (insn));
4559 unsigned int regno = REGNO (dest);
4560
4561 if (regno < FIRST_PSEUDO_REGISTER
4562 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4563 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4564 {
4565 int nr = hard_regno_nregs (regno, rld[i].mode);
4566 int ok = 1, nri;
4567
4568 for (nri = 1; nri < nr; nri ++)
4569 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4570 {
4571 ok = 0;
4572 break;
4573 }
4574
4575 if (ok)
4576 rld[i].reg_rtx = dest;
4577 }
4578 }
4579
4580 return retval;
4581 }
4582
4583 /* Return true if alternative number ALTNUM in constraint-string
4584 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4585 MEM gives the reference if its address hasn't been fully reloaded,
4586 otherwise it is NULL. */
4587
4588 static bool
4589 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4590 const char *constraint, int altnum)
4591 {
4592 int c;
4593
4594 /* Skip alternatives before the one requested. */
4595 while (altnum > 0)
4596 {
4597 while (*constraint++ != ',')
4598 ;
4599 altnum--;
4600 }
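/* For example, with CONSTRAINT "=r,m,o" and ALTNUM 2, the loop above
leaves CONSTRAINT pointing at the "o" alternative. */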
4601 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4602 If one of them is present, this alternative accepts the result of
4603 passing a constant-pool reference through find_reloads_toplev.
4604
4605 The same is true of extra memory constraints if the address
4606 was reloaded into a register. However, the target may elect
4607 to disallow the original constant address, forcing it to be
4608 reloaded into a register instead. */
4609 for (; (c = *constraint) && c != ',' && c != '#';
4610 constraint += CONSTRAINT_LEN (c, constraint))
4611 {
4612 enum constraint_num cn = lookup_constraint (constraint);
4613 if (insn_extra_memory_constraint (cn)
4614 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4615 return true;
4616 }
4617 return false;
4618 }
4619 \f
4620 /* Scan X for memory references and scan the addresses for reloading.
4621 Also checks for references to "constant" regs that we want to eliminate
4622 and replaces them with the values they stand for.
4623 We may alter X destructively if it contains a reference to such.
4624 If X is just a constant reg, we return the equivalent value
4625 instead of X.
4626
4627 IND_LEVELS says how many levels of indirect addressing this machine
4628 supports.
4629
4630 OPNUM and TYPE identify the purpose of the reload.
4631
4632 IS_SET_DEST is true if X is the destination of a SET, which is not
4633 appropriate to be replaced by a constant.
4634
4635 INSN, if nonzero, is the insn in which we do the reload. It is used
4636 to determine if we may generate output reloads, and where to put USEs
4637 for pseudos that we have to replace with stack slots.
4638
4639 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4640 result of find_reloads_address. */
4641
4642 static rtx
4643 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4644 int ind_levels, int is_set_dest, rtx_insn *insn,
4645 int *address_reloaded)
4646 {
4647 RTX_CODE code = GET_CODE (x);
4648
4649 const char *fmt = GET_RTX_FORMAT (code);
4650 int i;
4651 int copied;
4652
4653 if (code == REG)
4654 {
4655 /* This code is duplicated for speed in find_reloads. */
4656 int regno = REGNO (x);
4657 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4658 x = reg_equiv_constant (regno);
4659 #if 0
4660 /* This creates (subreg (mem...)) which would cause an unnecessary
4661 reload of the mem. */
4662 else if (reg_equiv_mem (regno) != 0)
4663 x = reg_equiv_mem (regno);
4664 #endif
4665 else if (reg_equiv_memory_loc (regno)
4666 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4667 {
4668 rtx mem = make_memloc (x, regno);
4669 if (reg_equiv_address (regno)
4670 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4671 {
4672 /* If this is not a toplevel operand, find_reloads doesn't see
4673 this substitution. We have to emit a USE of the pseudo so
4674 that delete_output_reload can see it. */
4675 if (replace_reloads && recog_data.operand[opnum] != x)
4676 /* We mark the USE with QImode so that we recognize it
4677 as one that can be safely deleted at the end of
4678 reload. */
4679 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4680 QImode);
4681 x = mem;
4682 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4683 opnum, type, ind_levels, insn);
4684 if (!rtx_equal_p (x, mem))
4685 push_reg_equiv_alt_mem (regno, x);
4686 if (address_reloaded)
4687 *address_reloaded = i;
4688 }
4689 }
4690 return x;
4691 }
4692 if (code == MEM)
4693 {
4694 rtx tem = x;
4695
4696 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4697 opnum, type, ind_levels, insn);
4698 if (address_reloaded)
4699 *address_reloaded = i;
4700
4701 return tem;
4702 }
4703
4704 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4705 {
4706 /* Check for SUBREG containing a REG that's equivalent to a
4707 constant. If the constant has a known value, truncate it
4708 right now. Similarly if we are extracting a single-word of a
4709 multi-word constant. If the constant is symbolic, allow it
4710 to be substituted normally. push_reload will strip the
4711 subreg later. The constant must not be VOIDmode, because we
4712 will lose the mode of the register (this should never happen
4713 because one of the cases above should handle it). */
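/* For example, on a little-endian target a (subreg:QI ... 0) of a
pseudo equivalent to (const_int 0x1234) simplifies here to
(const_int 0x34). */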
4714
4715 int regno = REGNO (SUBREG_REG (x));
4716 rtx tem;
4717
4718 if (regno >= FIRST_PSEUDO_REGISTER
4719 && reg_renumber[regno] < 0
4720 && reg_equiv_constant (regno) != 0)
4721 {
4722 tem =
4723 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4724 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4725 gcc_assert (tem);
4726 if (CONSTANT_P (tem)
4727 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4728 {
4729 tem = force_const_mem (GET_MODE (x), tem);
4730 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4731 &XEXP (tem, 0), opnum, type,
4732 ind_levels, insn);
4733 if (address_reloaded)
4734 *address_reloaded = i;
4735 }
4736 return tem;
4737 }
4738
4739 /* If the subreg contains a reg that will be converted to a mem,
4740 attempt to convert the whole subreg to a (narrower or wider)
4741 memory reference instead. If this succeeds, we're done --
4742 otherwise fall through to check whether the inner reg still
4743 needs address reloads anyway. */
4744
4745 if (regno >= FIRST_PSEUDO_REGISTER
4746 && reg_equiv_memory_loc (regno) != 0)
4747 {
4748 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4749 insn, address_reloaded);
4750 if (tem)
4751 return tem;
4752 }
4753 }
4754
4755 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4756 {
4757 if (fmt[i] == 'e')
4758 {
4759 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4760 ind_levels, is_set_dest, insn,
4761 address_reloaded);
4762 /* If we have replaced a reg with its equivalent memory loc -
4763 that can still be handled here e.g. if it's in a paradoxical
4764 subreg - we must make the change in a copy, rather than using
4765 a destructive change. This way, find_reloads can still elect
4766 not to do the change. */
4767 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4768 {
4769 x = shallow_copy_rtx (x);
4770 copied = 1;
4771 }
4772 XEXP (x, i) = new_part;
4773 }
4774 }
4775 return x;
4776 }
4777
4778 /* Return a mem ref for the memory equivalent of reg REGNO.
4779 This mem ref is not shared with anything. */
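
/* A hypothetical example (pseudo number and frame offsets are invented
   for illustration): if reg_equiv_memory_loc (70) is
   (mem:SI (plus:SI (reg fp) (const_int -8))) and the frame pointer has
   been eliminated in favor of the stack pointer, make_memloc re-runs
   eliminate_regs and might return a fresh, unshared rtx such as
   (mem:SI (plus:SI (reg sp) (const_int 24))).  */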
4780
4781 static rtx
4782 make_memloc (rtx ad, int regno)
4783 {
4784 /* We must rerun eliminate_regs, in case the elimination
4785 offsets have changed. */
4786 rtx tem
4787 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4788 0);
4789
4790 /* If TEM might contain a pseudo, we must copy it to avoid
4791 modifying it when we do the substitution for the reload. */
4792 if (rtx_varies_p (tem, 0))
4793 tem = copy_rtx (tem);
4794
4795 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4796 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4797
4798 /* Copy the result if it's still the same as the equivalence, to avoid
4799 modifying it when we do the substitution for the reload. */
4800 if (tem == reg_equiv_memory_loc (regno))
4801 tem = copy_rtx (tem);
4802 return tem;
4803 }
4804
4805 /* Returns true if AD could be turned into a valid memory reference
4806 to mode MODE in address space AS by reloading the part pointed to
4807 by PART into a register. */
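
/* For illustration (the address and displacement are hypothetical): with
   AD = (plus:SI (reg fp) (const_int 100000)) and PART = &XEXP (ad, 1),
   we temporarily replace the displacement with a brand-new REG and ask
   whether (plus:SI (reg fp) (reg NEW)) would be a valid address for
   MODE; *PART is restored before returning.  */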
4808
4809 static int
4810 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4811 addr_space_t as, rtx *part)
4812 {
4813 int retv;
4814 rtx tem = *part;
4815 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4816
4817 *part = reg;
4818 retv = memory_address_addr_space_p (mode, ad, as);
4819 *part = tem;
4820
4821 return retv;
4822 }
4823
4824 /* Record all reloads needed for handling memory address AD
4825 which appears in *LOC in a memory reference to mode MODE
4826 which itself is found in location *MEMREFLOC.
4827 Note that we take shortcuts assuming that no multi-reg machine mode
4828 occurs as part of an address.
4829
4830 OPNUM and TYPE specify the purpose of this reload.
4831
4832 IND_LEVELS says how many levels of indirect addressing this machine
4833 supports.
4834
4835 INSN, if nonzero, is the insn in which we do the reload. It is used
4836 to determine if we may generate output reloads, and where to put USEs
4837 for pseudos that we have to replace with stack slots.
4838
4839 Value is one if this address is reloaded or replaced as a whole; it is
4840 zero if the top level of this address was not reloaded or replaced, and
4841 it is -1 if it may or may not have been reloaded or replaced.
4842
4843 Note that there is no verification that the address will be valid after
4844 this routine does its work. Instead, we rely on the fact that the address
4845 was valid when reload started. So we need only undo things that reload
4846 could have broken. These are wrong register types, pseudos not allocated
4847 to a hard register, and frame pointer elimination. */
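
/* A hypothetical example of the return value: if AD is (reg:SI 70) and
   reg_equiv_constant (70) is (symbol_ref "x"), the constant is reloaded
   into a base register and the value is 1, because the address was
   replaced as a whole.  If AD is already strictly valid and we merely
   substitute for an eliminable register inside it, the value is 0.  */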
4848
4849 static int
4850 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4851 rtx *loc, int opnum, enum reload_type type,
4852 int ind_levels, rtx_insn *insn)
4853 {
4854 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4855 : ADDR_SPACE_GENERIC;
4856 int regno;
4857 int removed_and = 0;
4858 int op_index;
4859 rtx tem;
4860
4861 /* If the address is a register, see if it is a legitimate address and
4862 reload if not. We first handle the cases where we need not reload
4863 or where we must reload in a non-standard way. */
4864
4865 if (REG_P (ad))
4866 {
4867 regno = REGNO (ad);
4868
4869 if (reg_equiv_constant (regno) != 0)
4870 {
4871 find_reloads_address_part (reg_equiv_constant (regno), loc,
4872 base_reg_class (mode, as, MEM, SCRATCH),
4873 GET_MODE (ad), opnum, type, ind_levels);
4874 return 1;
4875 }
4876
4877 tem = reg_equiv_memory_loc (regno);
4878 if (tem != 0)
4879 {
4880 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4881 {
4882 tem = make_memloc (ad, regno);
4883 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4884 XEXP (tem, 0),
4885 MEM_ADDR_SPACE (tem)))
4886 {
4887 rtx orig = tem;
4888
4889 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4890 &XEXP (tem, 0), opnum,
4891 ADDR_TYPE (type), ind_levels, insn);
4892 if (!rtx_equal_p (tem, orig))
4893 push_reg_equiv_alt_mem (regno, tem);
4894 }
4895 /* We can avoid a reload if the register's equivalent memory
4896 expression is valid as an indirect memory address.
4897 But not all addresses are valid in a mem used as an indirect
4898 address: only reg or reg+constant. */
4899
4900 if (ind_levels > 0
4901 && strict_memory_address_addr_space_p (mode, tem, as)
4902 && (REG_P (XEXP (tem, 0))
4903 || (GET_CODE (XEXP (tem, 0)) == PLUS
4904 && REG_P (XEXP (XEXP (tem, 0), 0))
4905 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4906 {
4907 /* TEM is not the same as what we'll be replacing the
4908 pseudo with after reload, put a USE in front of INSN
4909 in the final reload pass. */
4910 if (replace_reloads
4911 && num_not_at_initial_offset
4912 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4913 {
4914 *loc = tem;
4915 /* We mark the USE with QImode so that we
4916 recognize it as one that can be safely
4917 deleted at the end of reload. */
4918 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4919 insn), QImode);
4920
4921 /* This doesn't really count as replacing the address
4922 as a whole, since it is still a memory access. */
4923 }
4924 return 0;
4925 }
4926 ad = tem;
4927 }
4928 }
4929
4930 /* The only remaining case where we can avoid a reload is if this is a
4931 hard register that is valid as a base register and which is not the
4932 subject of a CLOBBER in this insn. */
4933
4934 else if (regno < FIRST_PSEUDO_REGISTER
4935 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4936 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4937 return 0;
4938
4939 /* If we do not have one of the cases above, we must do the reload. */
4940 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4941 base_reg_class (mode, as, MEM, SCRATCH),
4942 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4943 return 1;
4944 }
4945
4946 if (strict_memory_address_addr_space_p (mode, ad, as))
4947 {
4948 /* The address appears valid, so reloads are not needed.
4949 But the address may contain an eliminable register.
4950 This can happen because a machine with indirect addressing
4951 may consider a pseudo register by itself a valid address even when
4952 it has failed to get a hard reg.
4953 So do a tree-walk to find and eliminate all such regs. */
4954
4955 /* But first quickly dispose of a common case. */
4956 if (GET_CODE (ad) == PLUS
4957 && CONST_INT_P (XEXP (ad, 1))
4958 && REG_P (XEXP (ad, 0))
4959 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4960 return 0;
4961
4962 subst_reg_equivs_changed = 0;
4963 *loc = subst_reg_equivs (ad, insn);
4964
4965 if (! subst_reg_equivs_changed)
4966 return 0;
4967
4968 /* Check result for validity after substitution. */
4969 if (strict_memory_address_addr_space_p (mode, ad, as))
4970 return 0;
4971 }
4972
4973 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4974 do
4975 {
4976 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4977 {
4978 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4979 ind_levels, win);
4980 }
4981 break;
4982 win:
4983 *memrefloc = copy_rtx (*memrefloc);
4984 XEXP (*memrefloc, 0) = ad;
4985 move_replacements (&ad, &XEXP (*memrefloc, 0));
4986 return -1;
4987 }
4988 while (0);
4989 #endif
4990
4991 /* The address is not valid. We have to figure out why. First see if
4992 we have an outer AND and remove it if so. Then analyze what's inside. */
4993
4994 if (GET_CODE (ad) == AND)
4995 {
4996 removed_and = 1;
4997 loc = &XEXP (ad, 0);
4998 ad = *loc;
4999 }
5000
5001 /* One possibility for why the address is invalid is that it is itself
5002 a MEM. This can happen when the frame pointer is being eliminated, a
5003 pseudo is not allocated to a hard register, and the offset between the
5004 frame and stack pointers is not its initial value. In that case the
5005 pseudo will have been replaced by a MEM referring to the
5006 stack pointer. */
5007 if (MEM_P (ad))
5008 {
5009 /* First ensure that the address in this MEM is valid. Then, unless
5010 indirect addresses are valid, reload the MEM into a register. */
5011 tem = ad;
5012 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5013 opnum, ADDR_TYPE (type),
5014 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5015
5016 /* If tem was changed, then we must create a new memory reference to
5017 hold it and store it back into memrefloc. */
5018 if (tem != ad && memrefloc)
5019 {
5020 *memrefloc = copy_rtx (*memrefloc);
5021 copy_replacements (tem, XEXP (*memrefloc, 0));
5022 loc = &XEXP (*memrefloc, 0);
5023 if (removed_and)
5024 loc = &XEXP (*loc, 0);
5025 }
5026
5027 /* Check cases similar to those for indirect addresses above, except
5028 that we can allow pseudos and a MEM since they should have been
5029 taken care of above. */
5030
5031 if (ind_levels == 0
5032 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5033 || MEM_P (XEXP (tem, 0))
5034 || ! (REG_P (XEXP (tem, 0))
5035 || (GET_CODE (XEXP (tem, 0)) == PLUS
5036 && REG_P (XEXP (XEXP (tem, 0), 0))
5037 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5038 {
5039 /* Must use TEM here, not AD, since it is the one that will
5040 have any subexpressions reloaded, if needed. */
5041 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5042 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5043 VOIDmode, 0,
5044 0, opnum, type);
5045 return ! removed_and;
5046 }
5047 else
5048 return 0;
5049 }
5050
5051 /* If we have the address of a stack slot but it's not valid because the
5052 displacement is too large, compute the sum in a register.
5053 Handle all base registers here, not just fp/ap/sp, because on some
5054 targets (namely SH) we can also get too large displacements from
5055 big-endian corrections. */
5056 else if (GET_CODE (ad) == PLUS
5057 && REG_P (XEXP (ad, 0))
5058 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5059 && CONST_INT_P (XEXP (ad, 1))
5060 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5061 CONST_INT)
5062 /* Similarly, if we were to reload the base register and the
5063 mem+offset address is still invalid, then we want to reload
5064 the whole address, not just the base register. */
5065 || ! maybe_memory_address_addr_space_p
5066 (mode, ad, as, &(XEXP (ad, 0)))))
5067
5068 {
5069 /* Unshare the MEM rtx so we can safely alter it. */
5070 if (memrefloc)
5071 {
5072 *memrefloc = copy_rtx (*memrefloc);
5073 loc = &XEXP (*memrefloc, 0);
5074 if (removed_and)
5075 loc = &XEXP (*loc, 0);
5076 }
5077
5078 if (double_reg_address_ok[mode]
5079 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5080 PLUS, CONST_INT))
5081 {
5082 /* Unshare the sum as well. */
5083 *loc = ad = copy_rtx (ad);
5084
5085 /* Reload the displacement into an index reg.
5086 We assume the frame pointer or arg pointer is a base reg. */
5087 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5088 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5089 type, ind_levels);
5090 return 0;
5091 }
5092 else
5093 {
5094 /* If the sum of two regs is not necessarily valid,
5095 reload the sum into a base reg.
5096 That will at least work. */
5097 find_reloads_address_part (ad, loc,
5098 base_reg_class (mode, as, MEM, SCRATCH),
5099 GET_MODE (ad), opnum, type, ind_levels);
5100 }
5101 return ! removed_and;
5102 }
5103
5104 /* If we have an indexed stack slot, there are three possible reasons why
5105 it might be invalid: The index might need to be reloaded, the address
5106 might have been made by frame pointer elimination and hence have a
5107 constant out of range, or both reasons might apply.
5108
5109 We can easily check for an index needing reload, but even if that is the
5110 case, we might also have an invalid constant. To avoid making the
5111 conservative assumption and requiring two reloads, we see if this address
5112 is valid when not interpreted strictly. If it is, the only problem is
5113 that the index needs a reload and find_reloads_address_1 will take care
5114 of it.
5115
5116 Handle all base registers here, not just fp/ap/sp, because on some
5117 targets (namely SPARC) we can also get invalid addresses from preventive
5118 subreg big-endian corrections made by find_reloads_toplev. We
5119 can also get expressions involving LO_SUM (rather than PLUS) from
5120 find_reloads_subreg_address.
5121
5122 If we decide to do something, it must be that `double_reg_address_ok'
5123 is true. We generate a reload of the base register + constant and
5124 rework the sum so that the reload register will be added to the index.
5125 This is safe because we know the address isn't shared.
5126
5127 We check for the base register as both the first and second operand of
5128 the innermost PLUS and/or LO_SUM. */
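
/* A sketch of that rework, with hypothetical registers and
   displacement: for AD = (plus:SI (plus:SI (reg fp) (reg 71))
   (const_int 100)) whose displacement is out of range, OPERAND is the
   frame pointer and ADDEND is (reg 71).  We form OFFSET_REG =
   (plus:SI (reg fp) (const_int 100)), rebuild AD as
   (plus:SI OFFSET_REG (reg 71)), reload OFFSET_REG into a base
   register, and let find_reloads_address_1 handle the index, so the
   final address has the form (plus:SI (reg base-reload) (reg 71)).  */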
5129
5130 for (op_index = 0; op_index < 2; ++op_index)
5131 {
5132 rtx operand, addend;
5133 enum rtx_code inner_code;
5134
5135 if (GET_CODE (ad) != PLUS)
5136 continue;
5137
5138 inner_code = GET_CODE (XEXP (ad, 0));
5139 if (!(GET_CODE (ad) == PLUS
5140 && CONST_INT_P (XEXP (ad, 1))
5141 && (inner_code == PLUS || inner_code == LO_SUM)))
5142 continue;
5143
5144 operand = XEXP (XEXP (ad, 0), op_index);
5145 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5146 continue;
5147
5148 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5149
5150 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5151 GET_CODE (addend))
5152 || operand == frame_pointer_rtx
5153 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5154 && operand == hard_frame_pointer_rtx)
5155 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5156 && operand == arg_pointer_rtx)
5157 || operand == stack_pointer_rtx)
5158 && ! maybe_memory_address_addr_space_p
5159 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5160 {
5161 rtx offset_reg;
5162 enum reg_class cls;
5163
5164 offset_reg = plus_constant (GET_MODE (ad), operand,
5165 INTVAL (XEXP (ad, 1)));
5166
5167 /* Form the adjusted address. */
5168 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5169 ad = gen_rtx_PLUS (GET_MODE (ad),
5170 op_index == 0 ? offset_reg : addend,
5171 op_index == 0 ? addend : offset_reg);
5172 else
5173 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5174 op_index == 0 ? offset_reg : addend,
5175 op_index == 0 ? addend : offset_reg);
5176 *loc = ad;
5177
5178 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5179 find_reloads_address_part (XEXP (ad, op_index),
5180 &XEXP (ad, op_index), cls,
5181 GET_MODE (ad), opnum, type, ind_levels);
5182 find_reloads_address_1 (mode, as,
5183 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5184 GET_CODE (XEXP (ad, op_index)),
5185 &XEXP (ad, 1 - op_index), opnum,
5186 type, 0, insn);
5187
5188 return 0;
5189 }
5190 }
5191
5192 /* See if address becomes valid when an eliminable register
5193 in a sum is replaced. */
5194
5195 tem = ad;
5196 if (GET_CODE (ad) == PLUS)
5197 tem = subst_indexed_address (ad);
5198 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5199 {
5200 /* Ok, we win that way. Replace any additional eliminable
5201 registers. */
5202
5203 subst_reg_equivs_changed = 0;
5204 tem = subst_reg_equivs (tem, insn);
5205
5206 /* Make sure that didn't make the address invalid again. */
5207
5208 if (! subst_reg_equivs_changed
5209 || strict_memory_address_addr_space_p (mode, tem, as))
5210 {
5211 *loc = tem;
5212 return 0;
5213 }
5214 }
5215
5216 /* If constants aren't valid addresses, reload the constant address
5217 into a register. */
5218 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5219 {
5220 machine_mode address_mode = GET_MODE (ad);
5221 if (address_mode == VOIDmode)
5222 address_mode = targetm.addr_space.address_mode (as);
5223
5224 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5225 Unshare it so we can safely alter it. */
5226 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5227 && CONSTANT_POOL_ADDRESS_P (ad))
5228 {
5229 *memrefloc = copy_rtx (*memrefloc);
5230 loc = &XEXP (*memrefloc, 0);
5231 if (removed_and)
5232 loc = &XEXP (*loc, 0);
5233 }
5234
5235 find_reloads_address_part (ad, loc,
5236 base_reg_class (mode, as, MEM, SCRATCH),
5237 address_mode, opnum, type, ind_levels);
5238 return ! removed_and;
5239 }
5240
5241 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5242 opnum, type, ind_levels, insn);
5243 }
5244 \f
5245 /* Find all pseudo regs appearing in AD
5246 that are eliminable in favor of equivalent values
5247 and do not have hard regs; replace them by their equivalents.
5248 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5249 front of it for pseudos that we have to replace with stack slots. */
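
/* For example (the pseudo number and symbol are hypothetical): given
   AD = (plus:SI (reg:SI 70) (const_int 4)) where pseudo 70 has
   reg_equiv_constant (70) = (symbol_ref "x"), the walk rewrites AD in
   place to (plus:SI (symbol_ref "x") (const_int 4)) and sets
   subst_reg_equivs_changed.  */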
5250
5251 static rtx
5252 subst_reg_equivs (rtx ad, rtx_insn *insn)
5253 {
5254 RTX_CODE code = GET_CODE (ad);
5255 int i;
5256 const char *fmt;
5257
5258 switch (code)
5259 {
5260 case HIGH:
5261 case CONST:
5262 CASE_CONST_ANY:
5263 case SYMBOL_REF:
5264 case LABEL_REF:
5265 case PC:
5266 case CC0:
5267 return ad;
5268
5269 case REG:
5270 {
5271 int regno = REGNO (ad);
5272
5273 if (reg_equiv_constant (regno) != 0)
5274 {
5275 subst_reg_equivs_changed = 1;
5276 return reg_equiv_constant (regno);
5277 }
5278 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5279 {
5280 rtx mem = make_memloc (ad, regno);
5281 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5282 {
5283 subst_reg_equivs_changed = 1;
5284 /* We mark the USE with QImode so that we recognize it
5285 as one that can be safely deleted at the end of
5286 reload. */
5287 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5288 QImode);
5289 return mem;
5290 }
5291 }
5292 }
5293 return ad;
5294
5295 case PLUS:
5296 /* Quickly dispose of a common case. */
5297 if (XEXP (ad, 0) == frame_pointer_rtx
5298 && CONST_INT_P (XEXP (ad, 1)))
5299 return ad;
5300 break;
5301
5302 default:
5303 break;
5304 }
5305
5306 fmt = GET_RTX_FORMAT (code);
5307 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5308 if (fmt[i] == 'e')
5309 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5310 return ad;
5311 }
5312 \f
5313 /* Compute the sum of X and Y, making canonicalizations assumed in an
5314 address, namely: sum constant integers, surround the sum of two
5315 constants with a CONST, put the constant as the second operand, and
5316 group the constant on the outermost sum.
5317
5318 This routine assumes both inputs are already in canonical form. */
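
/* Two worked examples, assuming Pmode is SImode and using a hypothetical
   pseudo number:

     form_sum (Pmode, (reg:SI 70), (const_int 4))
       => (plus:SI (reg:SI 70) (const_int 4))

     form_sum (Pmode, (plus:SI (reg:SI 70) (const_int 4)), (const_int 8))
       => (plus:SI (reg:SI 70) (const_int 12))

   i.e. the constants are folded together and kept on the outermost sum.  */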
5319
5320 rtx
5321 form_sum (machine_mode mode, rtx x, rtx y)
5322 {
5323 rtx tem;
5324
5325 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5326 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5327
5328 if (CONST_INT_P (x))
5329 return plus_constant (mode, y, INTVAL (x));
5330 else if (CONST_INT_P (y))
5331 return plus_constant (mode, x, INTVAL (y));
5332 else if (CONSTANT_P (x))
5333 tem = x, x = y, y = tem;
5334
5335 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5336 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5337
5338 /* Note that if the operands of Y are specified in the opposite
5339 order in the recursive calls below, infinite recursion will occur. */
5340 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5341 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5342
5343 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5344 constant will have been placed second. */
5345 if (CONSTANT_P (x) && CONSTANT_P (y))
5346 {
5347 if (GET_CODE (x) == CONST)
5348 x = XEXP (x, 0);
5349 if (GET_CODE (y) == CONST)
5350 y = XEXP (y, 0);
5351
5352 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5353 }
5354
5355 return gen_rtx_PLUS (mode, x, y);
5356 }
5357 \f
5358 /* If ADDR is a sum containing a pseudo register that should be
5359 replaced with a constant (from reg_equiv_constant),
5360 return the result of doing so, and also apply the associative
5361 law so that the result is more likely to be a valid address.
5362 (But it is not guaranteed to be one.)
5363
5364 Note that at most one register is replaced, even if more are
5365 replaceable. Also, we try to put the result into a canonical form
5366 so it is more likely to be a valid address.
5367
5368 In all other cases, return ADDR. */
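
/* A hypothetical example: for ADDR = (plus:SI (reg:SI 70) (reg:SI 71)),
   where pseudo 70 got no hard register and reg_equiv_constant (70) is
   (const_int 64), the register is replaced and the sum re-formed,
   giving (plus:SI (reg:SI 71) (const_int 64)), which is more likely to
   be a valid address form.  */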
5369
5370 static rtx
5371 subst_indexed_address (rtx addr)
5372 {
5373 rtx op0 = 0, op1 = 0, op2 = 0;
5374 rtx tem;
5375 int regno;
5376
5377 if (GET_CODE (addr) == PLUS)
5378 {
5379 /* Try to find a register to replace. */
5380 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5381 if (REG_P (op0)
5382 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5383 && reg_renumber[regno] < 0
5384 && reg_equiv_constant (regno) != 0)
5385 op0 = reg_equiv_constant (regno);
5386 else if (REG_P (op1)
5387 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5388 && reg_renumber[regno] < 0
5389 && reg_equiv_constant (regno) != 0)
5390 op1 = reg_equiv_constant (regno);
5391 else if (GET_CODE (op0) == PLUS
5392 && (tem = subst_indexed_address (op0)) != op0)
5393 op0 = tem;
5394 else if (GET_CODE (op1) == PLUS
5395 && (tem = subst_indexed_address (op1)) != op1)
5396 op1 = tem;
5397 else
5398 return addr;
5399
5400 /* Pick out up to three things to add. */
5401 if (GET_CODE (op1) == PLUS)
5402 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5403 else if (GET_CODE (op0) == PLUS)
5404 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5405
5406 /* Compute the sum. */
5407 if (op2 != 0)
5408 op1 = form_sum (GET_MODE (addr), op1, op2);
5409 if (op1 != 0)
5410 op0 = form_sum (GET_MODE (addr), op0, op1);
5411
5412 return op0;
5413 }
5414 return addr;
5415 }
5416 \f
5417 /* Update the REG_INC notes for an insn. It updates all REG_INC
5418 notes for the instruction which refer to REGNO so that they refer
5419 to the reload number.
5420
5421 INSN is the insn for which any REG_INC notes need updating.
5422
5423 REGNO is the register number which has been reloaded.
5424
5425 RELOADNUM is the reload number. */
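
/* For instance (the pseudo and reload numbers are hypothetical): if INSN
   carries a note (REG_INC (reg 70)) and pseudo 70 was just pushed as
   reload number 2, we record a replacement at the note's register slot,
   so that after subst_reloads the note refers to the reload register
   rather than the pseudo.  */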
5426
5427 static void
5428 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5429 int reloadnum ATTRIBUTE_UNUSED)
5430 {
5431 if (!AUTO_INC_DEC)
5432 return;
5433
5434 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5435 if (REG_NOTE_KIND (link) == REG_INC
5436 && (int) REGNO (XEXP (link, 0)) == regno)
5437 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5438 }
5439 \f
5440 /* Record the pseudo registers we must reload into hard registers in a
5441 subexpression of a would-be memory address, X referring to a value
5442 in mode MODE. (This function is not called if the address we find
5443 is strictly valid.)
5444
5445 CONTEXT = 1 means we are considering regs as index regs,
5446 = 0 means we are considering them as base regs.
5447 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5448 or an autoinc code.
5449 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5450 is the code of the index part of the address. Otherwise, pass SCRATCH
5451 for this argument.
5452 OPNUM and TYPE specify the purpose of any reloads made.
5453
5454 IND_LEVELS says how many levels of indirect addressing are
5455 supported at this point in the address.
5456
5457 INSN, if nonzero, is the insn in which we do the reload. It is used
5458 to determine if we may generate output reloads.
5459
5460 We return nonzero if X, as a whole, is reloaded or replaced. */
5461
5462 /* Note that we take shortcuts assuming that no multi-reg machine mode
5463 occurs as part of an address.
5464 Also, this is not fully machine-customizable; it works for machines
5465 such as VAXen and 68000's and 32000's, but other possible machines
5466 could have addressing modes that this does not handle right.
5467 If you add push_reload calls here, you need to make sure gen_reload
5468 handles those cases gracefully. */
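
/* A sketch of how CONTEXT is used (register numbers are hypothetical):
   for X = (plus:SI (mult:SI (reg 70) (const_int 4)) (reg 71)), the MULT
   operand is processed recursively with CONTEXT == 1, i.e. as an index,
   while (reg 71) is processed with CONTEXT == 0 as a base register for
   the enclosing PLUS.  */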
5469
5470 static int
5471 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5472 rtx x, int context,
5473 enum rtx_code outer_code, enum rtx_code index_code,
5474 rtx *loc, int opnum, enum reload_type type,
5475 int ind_levels, rtx_insn *insn)
5476 {
5477 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5478 ((CONTEXT) == 0 \
5479 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5480 : REGNO_OK_FOR_INDEX_P (REGNO))
5481
5482 enum reg_class context_reg_class;
5483 RTX_CODE code = GET_CODE (x);
5484 bool reloaded_inner_of_autoinc = false;
5485
5486 if (context == 1)
5487 context_reg_class = INDEX_REG_CLASS;
5488 else
5489 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5490
5491 switch (code)
5492 {
5493 case PLUS:
5494 {
5495 rtx orig_op0 = XEXP (x, 0);
5496 rtx orig_op1 = XEXP (x, 1);
5497 RTX_CODE code0 = GET_CODE (orig_op0);
5498 RTX_CODE code1 = GET_CODE (orig_op1);
5499 rtx op0 = orig_op0;
5500 rtx op1 = orig_op1;
5501
5502 if (GET_CODE (op0) == SUBREG)
5503 {
5504 op0 = SUBREG_REG (op0);
5505 code0 = GET_CODE (op0);
5506 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5507 op0 = gen_rtx_REG (word_mode,
5508 (REGNO (op0) +
5509 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5510 GET_MODE (SUBREG_REG (orig_op0)),
5511 SUBREG_BYTE (orig_op0),
5512 GET_MODE (orig_op0))));
5513 }
5514
5515 if (GET_CODE (op1) == SUBREG)
5516 {
5517 op1 = SUBREG_REG (op1);
5518 code1 = GET_CODE (op1);
5519 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5520 /* ??? Why is this given op1's mode and above for
5521 ??? op0 SUBREGs we use word_mode? */
5522 op1 = gen_rtx_REG (GET_MODE (op1),
5523 (REGNO (op1) +
5524 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5525 GET_MODE (SUBREG_REG (orig_op1)),
5526 SUBREG_BYTE (orig_op1),
5527 GET_MODE (orig_op1))));
5528 }
5529 /* A PLUS in the index position can be created only as a result of
5530 register rematerialization for an expression like &localvar*4. Reload it.
5531 It may be possible to combine the displacement on the outer level,
5532 but it is probably not worthwhile to do so. */
5533 if (context == 1)
5534 {
5535 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5536 opnum, ADDR_TYPE (type), ind_levels, insn);
5537 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5538 context_reg_class,
5539 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5540 return 1;
5541 }
5542
5543 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5544 || code0 == ZERO_EXTEND || code1 == MEM)
5545 {
5546 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5547 &XEXP (x, 0), opnum, type, ind_levels,
5548 insn);
5549 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5550 &XEXP (x, 1), opnum, type, ind_levels,
5551 insn);
5552 }
5553
5554 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5555 || code1 == ZERO_EXTEND || code0 == MEM)
5556 {
5557 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5558 &XEXP (x, 0), opnum, type, ind_levels,
5559 insn);
5560 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5561 &XEXP (x, 1), opnum, type, ind_levels,
5562 insn);
5563 }
5564
5565 else if (code0 == CONST_INT || code0 == CONST
5566 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5567 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5568 &XEXP (x, 1), opnum, type, ind_levels,
5569 insn);
5570
5571 else if (code1 == CONST_INT || code1 == CONST
5572 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5573 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5574 &XEXP (x, 0), opnum, type, ind_levels,
5575 insn);
5576
5577 else if (code0 == REG && code1 == REG)
5578 {
5579 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5580 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5581 return 0;
5582 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5583 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5584 return 0;
5585 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5586 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5587 &XEXP (x, 1), opnum, type, ind_levels,
5588 insn);
5589 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5590 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5591 &XEXP (x, 0), opnum, type, ind_levels,
5592 insn);
5593 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5594 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5595 &XEXP (x, 0), opnum, type, ind_levels,
5596 insn);
5597 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5598 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5599 &XEXP (x, 1), opnum, type, ind_levels,
5600 insn);
5601 else
5602 {
5603 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5604 &XEXP (x, 0), opnum, type, ind_levels,
5605 insn);
5606 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5607 &XEXP (x, 1), opnum, type, ind_levels,
5608 insn);
5609 }
5610 }
5611
5612 else if (code0 == REG)
5613 {
5614 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5615 &XEXP (x, 0), opnum, type, ind_levels,
5616 insn);
5617 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5618 &XEXP (x, 1), opnum, type, ind_levels,
5619 insn);
5620 }
5621
5622 else if (code1 == REG)
5623 {
5624 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5625 &XEXP (x, 1), opnum, type, ind_levels,
5626 insn);
5627 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 }
5631 }
5632
5633 return 0;
5634
5635 case POST_MODIFY:
5636 case PRE_MODIFY:
5637 {
5638 rtx op0 = XEXP (x, 0);
5639 rtx op1 = XEXP (x, 1);
5640 enum rtx_code index_code;
5641 int regno;
5642 int reloadnum;
5643
5644 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5645 return 0;
5646
5647 /* Currently, we only support {PRE,POST}_MODIFY constructs
5648 where a base register is {inc,dec}remented by the contents
5649 of another register or by a constant value. Thus, these
5650 operands must match. */
5651 gcc_assert (op0 == XEXP (op1, 0));
5652
5653 /* Require index register (or constant). Let's just handle the
5654 register case in the meantime... If the target allows
5655 auto-modify by a constant then we could try replacing a pseudo
5656 register with its equivalent constant where applicable.
5657
5658 We also handle the case where the register was eliminated
5659 resulting in a PLUS subexpression.
5660
5661 If we later decide to reload the whole PRE_MODIFY or
5662 POST_MODIFY, inc_for_reload might clobber the reload register
5663 before reading the index. The index register might therefore
5664 need to live longer than a TYPE reload normally would, so be
5665 conservative and class it as RELOAD_OTHER. */
5666 if ((REG_P (XEXP (op1, 1))
5667 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5668 || GET_CODE (XEXP (op1, 1)) == PLUS)
5669 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5670 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5671 ind_levels, insn);
5672
5673 gcc_assert (REG_P (XEXP (op1, 0)));
5674
5675 regno = REGNO (XEXP (op1, 0));
5676 index_code = GET_CODE (XEXP (op1, 1));
5677
5678 /* A register that is incremented cannot be constant! */
5679 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5680 || reg_equiv_constant (regno) == 0);
5681
5682 /* Handle a register that is equivalent to a memory location
5683 which cannot be addressed directly. */
5684 if (reg_equiv_memory_loc (regno) != 0
5685 && (reg_equiv_address (regno) != 0
5686 || num_not_at_initial_offset))
5687 {
5688 rtx tem = make_memloc (XEXP (x, 0), regno);
5689
5690 if (reg_equiv_address (regno)
5691 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5692 {
5693 rtx orig = tem;
5694
5695 /* First reload the memory location's address.
5696 We can't use ADDR_TYPE (type) here, because we need to
5697 write back the value after reading it, hence we actually
5698 need two registers. */
5699 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5700 &XEXP (tem, 0), opnum,
5701 RELOAD_OTHER,
5702 ind_levels, insn);
5703
5704 if (!rtx_equal_p (tem, orig))
5705 push_reg_equiv_alt_mem (regno, tem);
5706
5707 /* Then reload the memory location into a base
5708 register. */
5709 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5710 &XEXP (op1, 0),
5711 base_reg_class (mode, as,
5712 code, index_code),
5713 GET_MODE (x), GET_MODE (x), 0,
5714 0, opnum, RELOAD_OTHER);
5715
5716 update_auto_inc_notes (this_insn, regno, reloadnum);
5717 return 0;
5718 }
5719 }
5720
5721 if (reg_renumber[regno] >= 0)
5722 regno = reg_renumber[regno];
5723
5724 /* We require a base register here... */
5725 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5726 {
5727 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5728 &XEXP (op1, 0), &XEXP (x, 0),
5729 base_reg_class (mode, as,
5730 code, index_code),
5731 GET_MODE (x), GET_MODE (x), 0, 0,
5732 opnum, RELOAD_OTHER);
5733
5734 update_auto_inc_notes (this_insn, regno, reloadnum);
5735 return 0;
5736 }
5737 }
5738 return 0;
5739
5740 case POST_INC:
5741 case POST_DEC:
5742 case PRE_INC:
5743 case PRE_DEC:
5744 if (REG_P (XEXP (x, 0)))
5745 {
5746 int regno = REGNO (XEXP (x, 0));
5747 int value = 0;
5748 rtx x_orig = x;
5749
5750 /* A register that is incremented cannot be constant! */
5751 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5752 || reg_equiv_constant (regno) == 0);
5753
5754 /* Handle a register that is equivalent to a memory location
5755 which cannot be addressed directly. */
5756 if (reg_equiv_memory_loc (regno) != 0
5757 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5758 {
5759 rtx tem = make_memloc (XEXP (x, 0), regno);
5760 if (reg_equiv_address (regno)
5761 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5762 {
5763 rtx orig = tem;
5764
5765 /* First reload the memory location's address.
5766 We can't use ADDR_TYPE (type) here, because we need to
5767 write back the value after reading it, hence we actually
5768 need two registers. */
5769 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5770 &XEXP (tem, 0), opnum, type,
5771 ind_levels, insn);
5772 reloaded_inner_of_autoinc = true;
5773 if (!rtx_equal_p (tem, orig))
5774 push_reg_equiv_alt_mem (regno, tem);
5775 /* Put this inside a new increment-expression. */
5776 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5777 /* Proceed to reload that, as if it contained a register. */
5778 }
5779 }
5780
5781 /* If we have a hard register that is ok in this incdec context,
5782 don't make a reload. If the register isn't suitable for
5783 autoincdec, we can reload it. But if an autoincrement of a
5784 register that we have just verified as acceptable here is still
5785 not "valid" in the enclosing context, then no autoincrement can
5786 be "valid" at all. If that is true and something made an
5787 autoincrement anyway, this must be a special context where one
5788 is allowed. (For example, a "push" instruction.)
5789 We can't improve this address, so leave it alone. */
5790
5791 /* Otherwise, reload the autoincrement into a suitable hard reg
5792 and record how much to increment by. */
5793
5794 if (reg_renumber[regno] >= 0)
5795 regno = reg_renumber[regno];
5796 if (regno >= FIRST_PSEUDO_REGISTER
5797 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5798 index_code))
5799 {
5800 int reloadnum;
5801
5802 /* If we can output the register afterwards, do so, this
5803 saves the extra update.
5804 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5805 CALL_INSN - and it does not set CC0.
5806 But don't do this if we cannot directly address the
5807 memory location, since this will make it harder to
5808 reuse address reloads, and increase register pressure.
5809 Also don't do this if we can probably update x directly. */
5810 rtx equiv = (MEM_P (XEXP (x, 0))
5811 ? XEXP (x, 0)
5812 : reg_equiv_mem (regno));
5813 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5814 if (insn && NONJUMP_INSN_P (insn)
5815 #if HAVE_cc0
5816 && ! sets_cc0_p (PATTERN (insn))
5817 #endif
5818 && (regno < FIRST_PSEUDO_REGISTER
5819 || (equiv
5820 && memory_operand (equiv, GET_MODE (equiv))
5821 && ! (icode != CODE_FOR_nothing
5822 && insn_operand_matches (icode, 0, equiv)
5823 && insn_operand_matches (icode, 1, equiv))))
5824 /* Using RELOAD_OTHER means we emit this and the reload we
5825 made earlier in the wrong order. */
5826 && !reloaded_inner_of_autoinc)
5827 {
5828 /* We use the original pseudo for loc, so that
5829 emit_reload_insns() knows which pseudo this
5830 reload refers to and updates the pseudo rtx, not
5831 its equivalent memory location, as well as the
5832 corresponding entry in reg_last_reload_reg. */
5833 loc = &XEXP (x_orig, 0);
5834 x = XEXP (x, 0);
5835 reloadnum
5836 = push_reload (x, x, loc, loc,
5837 context_reg_class,
5838 GET_MODE (x), GET_MODE (x), 0, 0,
5839 opnum, RELOAD_OTHER);
5840 }
5841 else
5842 {
5843 reloadnum
5844 = push_reload (x, x, loc, (rtx*) 0,
5845 context_reg_class,
5846 GET_MODE (x), GET_MODE (x), 0, 0,
5847 opnum, type);
5848 rld[reloadnum].inc
5849 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5850
5851 value = 1;
5852 }
5853
5854 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5855 reloadnum);
5856 }
5857 return value;
5858 }
5859 return 0;
5860
5861 case TRUNCATE:
5862 case SIGN_EXTEND:
5863 case ZERO_EXTEND:
5864 /* Look for parts to reload in the inner expression and reload them
5865 too, in addition to this operation. Reloading all inner parts in
5866 addition to this one shouldn't be necessary, but at this point,
5867 we don't know if we can possibly omit any part that *can* be
5868 reloaded. Targets that are better off reloading just either part
5869 (or perhaps even a different part of an outer expression), should
5870 define LEGITIMIZE_RELOAD_ADDRESS. */
5871 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5872 context, code, SCRATCH, &XEXP (x, 0), opnum,
5873 type, ind_levels, insn);
5874 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5875 context_reg_class,
5876 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5877 return 1;
5878
5879 case MEM:
5880 /* This is probably the result of a substitution, by eliminate_regs, of
5881 an equivalent address for a pseudo that was not allocated to a hard
5882 register. Verify that the specified address is valid and reload it
5883 into a register.
5884
5885 Since we know we are going to reload this item, don't decrement for
5886 the indirection level.
5887
5888 Note that this is actually conservative: it would be slightly more
5889 efficient to use the value of SPILL_INDIRECT_LEVELS from
5890 reload1.c here. */
5891
5892 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5893 opnum, ADDR_TYPE (type), ind_levels, insn);
5894 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5895 context_reg_class,
5896 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5897 return 1;
5898
5899 case REG:
5900 {
5901 int regno = REGNO (x);
5902
5903 if (reg_equiv_constant (regno) != 0)
5904 {
5905 find_reloads_address_part (reg_equiv_constant (regno), loc,
5906 context_reg_class,
5907 GET_MODE (x), opnum, type, ind_levels);
5908 return 1;
5909 }
5910
5911 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5912 that feeds this insn. */
5913 if (reg_equiv_mem (regno) != 0)
5914 {
5915 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5916 context_reg_class,
5917 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5918 return 1;
5919 }
5920 #endif
5921
5922 if (reg_equiv_memory_loc (regno)
5923 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5924 {
5925 rtx tem = make_memloc (x, regno);
5926 if (reg_equiv_address (regno) != 0
5927 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5928 {
5929 x = tem;
5930 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5931 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5932 ind_levels, insn);
5933 if (!rtx_equal_p (x, tem))
5934 push_reg_equiv_alt_mem (regno, x);
5935 }
5936 }
5937
5938 if (reg_renumber[regno] >= 0)
5939 regno = reg_renumber[regno];
5940
5941 if (regno >= FIRST_PSEUDO_REGISTER
5942 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5943 index_code))
5944 {
5945 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5946 context_reg_class,
5947 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5948 return 1;
5949 }
5950
5951 /* If a register appearing in an address is the subject of a CLOBBER
5952 in this insn, reload it into some other register to be safe.
5953 The CLOBBER is supposed to make the register unavailable
5954 from before this insn to after it. */
5955 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5956 {
5957 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5958 context_reg_class,
5959 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5960 return 1;
5961 }
5962 }
5963 return 0;
5964
5965 case SUBREG:
5966 if (REG_P (SUBREG_REG (x)))
5967 {
5968 /* If this is a SUBREG of a hard register and the resulting register
5969 is of the wrong class, reload the whole SUBREG. This avoids
5970 needless copies if SUBREG_REG is multi-word. */
5971 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5972 {
5973 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5974
5975 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5976 index_code))
5977 {
5978 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5979 context_reg_class,
5980 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5981 return 1;
5982 }
5983 }
5984 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5985 is larger than the class size, then reload the whole SUBREG. */
5986 else
5987 {
5988 enum reg_class rclass = context_reg_class;
5989 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5990 > reg_class_size[(int) rclass])
5991 {
5992 /* If the inner register will be replaced by a memory
5993 reference, we can do this only if we can replace the
5994 whole subreg by a (narrower) memory reference. If
5995 this is not possible, fall through and reload just
5996 the inner register (including address reloads). */
5997 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
5998 {
5999 rtx tem = find_reloads_subreg_address (x, opnum,
6000 ADDR_TYPE (type),
6001 ind_levels, insn,
6002 NULL);
6003 if (tem)
6004 {
6005 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6006 GET_MODE (tem), VOIDmode, 0, 0,
6007 opnum, type);
6008 return 1;
6009 }
6010 }
6011 else
6012 {
6013 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6014 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6015 return 1;
6016 }
6017 }
6018 }
6019 }
6020 break;
6021
6022 default:
6023 break;
6024 }
6025
6026 {
6027 const char *fmt = GET_RTX_FORMAT (code);
6028 int i;
6029
6030 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6031 {
6032 if (fmt[i] == 'e')
6033 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6034 we get here. */
6035 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6036 code, SCRATCH, &XEXP (x, i),
6037 opnum, type, ind_levels, insn);
6038 }
6039 }
6040
6041 #undef REG_OK_FOR_CONTEXT
6042 return 0;
6043 }
6044 \f
6045 /* X, which is found at *LOC, is a part of an address that needs to be
6046 reloaded into a register of class RCLASS. If X is a constant, or if
6047 X is a PLUS that contains a constant, check that the constant is a
6048 legitimate operand and that we are supposed to be able to load
6049 it into the register.
6050
6051 If not, force the constant into memory and reload the MEM instead.
6052
6053 MODE is the mode to use, in case X is an integer constant.
6054
6055 OPNUM and TYPE describe the purpose of any reloads made.
6056
6057 IND_LEVELS says how many levels of indirect addressing this machine
6058 supports. */
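
/* A hedged example: on a target where X, say a CONST_DOUBLE, is not a
   legitimate constant for MODE, we force it into the constant pool,
   push any reloads needed for the pool reference's address, and finally
   push a reload of the resulting MEM into a register of class RCLASS.  */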
6059
6060 static void
6061 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6062 machine_mode mode, int opnum,
6063 enum reload_type type, int ind_levels)
6064 {
6065 if (CONSTANT_P (x)
6066 && (!targetm.legitimate_constant_p (mode, x)
6067 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6068 {
6069 x = force_const_mem (mode, x);
6070 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6071 opnum, type, ind_levels, 0);
6072 }
6073
6074 else if (GET_CODE (x) == PLUS
6075 && CONSTANT_P (XEXP (x, 1))
6076 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6077 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6078 == NO_REGS))
6079 {
6080 rtx tem;
6081
6082 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6083 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6084 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6085 opnum, type, ind_levels, 0);
6086 }
6087
6088 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6089 mode, VOIDmode, 0, 0, opnum, type);
6090 }
6091 \f
6092 /* X, a subreg of a pseudo, is a part of an address that needs to be
6093 reloaded, and the pseudo is equivalent to a memory location.
6094
6095 Attempt to replace the whole subreg by a (possibly narrower or wider)
6096 memory reference. If this is possible, return this new memory
6097 reference, and push all required address reloads. Otherwise,
6098 return NULL.
6099
6100 OPNUM and TYPE identify the purpose of the reload.
6101
6102 IND_LEVELS says how many levels of indirect addressing are
6103 supported at this point in the address.
6104
6105 INSN, if nonzero, is the insn in which we do the reload. It is used
6106 to determine where to put USEs for pseudos that we have to replace with
6107 stack slots. */
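
/* An illustrative case (pseudo number, offsets and endianness are
   assumptions): for X = (subreg:HI (reg:SI 70) 2) where pseudo 70 is
   equivalent to (mem:SI (plus:SI (reg sp) (const_int 16))), on a
   little-endian target simplify_subreg can turn the memory location
   into (mem:HI (plus:SI (reg sp) (const_int 18))); we then push
   whatever address reloads that narrower reference needs and return
   it.  */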
6108
6109 static rtx
6110 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6111 int ind_levels, rtx_insn *insn,
6112 int *address_reloaded)
6113 {
6114 machine_mode outer_mode = GET_MODE (x);
6115 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6116 int regno = REGNO (SUBREG_REG (x));
6117 int reloaded = 0;
6118 rtx tem, orig;
6119 poly_int64 offset;
6120
6121 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6122
6123 /* We cannot replace the subreg with a modified memory reference if:
6124
6125 - we have a paradoxical subreg that implicitly acts as a zero or
6126 sign extension operation due to LOAD_EXTEND_OP;
6127
6128 - we have a subreg that is implicitly supposed to act on the full
6129 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6130
6131 - the address of the equivalent memory location is mode-dependent; or
6132
6133 - we have a paradoxical subreg and the resulting memory is not
6134 sufficiently aligned to allow access in the wider mode.
6135
6136 In addition, we choose not to perform the replacement for *any*
6137 paradoxical subreg, even if it were possible in principle. This
6138 is to avoid generating wider memory references than necessary.
6139
6140 This corresponds to how previous versions of reload used to handle
6141 paradoxical subregs where no address reload was required. */
6142
6143 if (paradoxical_subreg_p (x))
6144 return NULL;
6145
6146 if (WORD_REGISTER_OPERATIONS
6147 && partial_subreg_p (outer_mode, inner_mode)
6148 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6149 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6150 return NULL;
6151
6152 /* Since we don't attempt to handle paradoxical subregs, we can just
6153 call into simplify_subreg, which will handle all remaining checks
6154 for us. */
6155 orig = make_memloc (SUBREG_REG (x), regno);
6156 offset = SUBREG_BYTE (x);
6157 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6158 if (!tem || !MEM_P (tem))
6159 return NULL;
6160
6161 /* Now push all required address reloads, if any. */
6162 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6163 XEXP (tem, 0), &XEXP (tem, 0),
6164 opnum, type, ind_levels, insn);
6165 /* ??? Do we need to handle nonzero offsets somehow? */
6166 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6167 push_reg_equiv_alt_mem (regno, tem);
6168
6169 /* For some processors an address may be valid in the original mode but
6170 not in a smaller mode. For example, ARM accepts a scaled index register
6171 in SImode but not in HImode. Note that this is only a problem if the
6172 address in reg_equiv_mem is already invalid in the new mode; other
6173 cases would be fixed by find_reloads_address as usual.
6174
6175 ??? We attempt to handle such cases here by doing an additional reload
6176 of the full address after the usual processing by find_reloads_address.
6177 Note that this may not work in the general case, but it seems to cover
6178 the cases where this situation currently occurs. A more general fix
6179 might be to reload the *value* instead of the address, but this would
6180 not be expected by the callers of this routine as-is.
6181
6182 If find_reloads_address already completely replaced the address, there
6183 is nothing further to do. */
6184 if (reloaded == 0
6185 && reg_equiv_mem (regno) != 0
6186 && !strict_memory_address_addr_space_p
6187 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6188 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6189 {
6190 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6191 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6192 MEM, SCRATCH),
6193 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6194 reloaded = 1;
6195 }
6196
6197 /* If this is not a toplevel operand, find_reloads doesn't see this
6198 substitution. We have to emit a USE of the pseudo so that
6199 delete_output_reload can see it. */
6200 if (replace_reloads && recog_data.operand[opnum] != x)
6201 /* We mark the USE with QImode so that we recognize it as one that
6202 can be safely deleted at the end of reload. */
6203 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6204 QImode);
6205
6206 if (address_reloaded)
6207 *address_reloaded = reloaded;
6208
6209 return tem;
6210 }
6211 \f
6212 /* Substitute into the current INSN the registers into which we have reloaded
6213 the things that need reloading. The array `replacements'
6214 contains the locations of all pointers that must be changed
6215 and says what to replace them with.
6216
6217 Nothing is returned; the replacements are made in place in INSN.
6218
6219 void
6220 subst_reloads (rtx_insn *insn)
6221 {
6222 int i;
6223
6224 for (i = 0; i < n_replacements; i++)
6225 {
6226 struct replacement *r = &replacements[i];
6227 rtx reloadreg = rld[r->what].reg_rtx;
6228 if (reloadreg)
6229 {
6230 #ifdef DEBUG_RELOAD
6231 /* This checking takes a very long time on some platforms
6232 causing the gcc.c-torture/compile/limits-fnargs.c test
6233 to time out during testing. See PR 31850.
6234
6235 Internal consistency test. Check that we don't modify
6236 anything in the equivalence arrays. Whenever something from
6237 those arrays needs to be reloaded, it must be unshared before
6238 being substituted into; the equivalence must not be modified.
6239 Otherwise, if the equivalence is used after that, it will
6240 have been modified, and the thing substituted (probably a
6241 register) is likely overwritten and not a usable equivalence. */
6242 int check_regno;
6243
6244 for (check_regno = 0; check_regno < max_regno; check_regno++)
6245 {
6246 #define CHECK_MODF(ARRAY) \
6247 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6248 || !loc_mentioned_in_p (r->where, \
6249 (*reg_equivs)[check_regno].ARRAY))
6250
6251 CHECK_MODF (constant);
6252 CHECK_MODF (memory_loc);
6253 CHECK_MODF (address);
6254 CHECK_MODF (mem);
6255 #undef CHECK_MODF
6256 }
6257 #endif /* DEBUG_RELOAD */
6258
6259 /* If we're replacing a LABEL_REF with a register, there must
6260 already be an indication (to e.g. flow) which label this
6261 register refers to. */
6262 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6263 || !JUMP_P (insn)
6264 || find_reg_note (insn,
6265 REG_LABEL_OPERAND,
6266 XEXP (*r->where, 0))
6267 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6268
6269 /* Encapsulate RELOADREG so its machine mode matches what
6270 used to be there. Note that gen_lowpart_common will
6271 do the wrong thing if RELOADREG is multi-word. RELOADREG
6272 will always be a REG here. */
6273 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6274 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6275
6276 *r->where = reloadreg;
6277 }
6278 /* If reload got no reg and isn't optional, something's wrong. */
6279 else
6280 gcc_assert (rld[r->what].optional);
6281 }
6282 }
6283 \f
6284 /* Make a copy of any replacements being done into X and move those
6285 copies to locations in Y, a copy of X. */
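
/* For example: if X is (plus:SI (reg:SI 70) (const_int 4)) and a
   replacement was recorded at &XEXP (x, 0), then after
   Y = copy_rtx (X), calling copy_replacements (x, y) records a parallel
   replacement at &XEXP (y, 0) so that the copy is updated as well.  */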
6286
6287 void
6288 copy_replacements (rtx x, rtx y)
6289 {
6290 copy_replacements_1 (&x, &y, n_replacements);
6291 }
6292
6293 static void
6294 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6295 {
6296 int i, j;
6297 rtx x, y;
6298 struct replacement *r;
6299 enum rtx_code code;
6300 const char *fmt;
6301
6302 for (j = 0; j < orig_replacements; j++)
6303 if (replacements[j].where == px)
6304 {
6305 r = &replacements[n_replacements++];
6306 r->where = py;
6307 r->what = replacements[j].what;
6308 r->mode = replacements[j].mode;
6309 }
6310
6311 x = *px;
6312 y = *py;
6313 code = GET_CODE (x);
6314 fmt = GET_RTX_FORMAT (code);
6315
6316 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6317 {
6318 if (fmt[i] == 'e')
6319 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6320 else if (fmt[i] == 'E')
6321 for (j = XVECLEN (x, i); --j >= 0; )
6322 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6323 orig_replacements);
6324 }
6325 }
6326
6327 /* Change any replacements being done to *X to be done to *Y. */
6328
6329 void
6330 move_replacements (rtx *x, rtx *y)
6331 {
6332 int i;
6333
6334 for (i = 0; i < n_replacements; i++)
6335 if (replacements[i].where == x)
6336 replacements[i].where = y;
6337 }
6338 \f
6339 /* If LOC was scheduled to be replaced by something, return the replacement.
6340 Otherwise, return *LOC. */
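
/* A hypothetical example: if *LOC is (plus:SI (reg:SI 70) (const_int 8))
   and a replacement is scheduled for the (reg:SI 70) slot whose reload
   was assigned hard register 3, find_replacement returns a fresh
   (plus:SI (reg:SI 3) (const_int 8)); the original rtx is left
   unmodified.  */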
6341
6342 rtx
6343 find_replacement (rtx *loc)
6344 {
6345 struct replacement *r;
6346
6347 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6348 {
6349 rtx reloadreg = rld[r->what].reg_rtx;
6350
6351 if (reloadreg && r->where == loc)
6352 {
6353 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6354 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6355
6356 return reloadreg;
6357 }
6358 else if (reloadreg && GET_CODE (*loc) == SUBREG
6359 && r->where == &SUBREG_REG (*loc))
6360 {
6361 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6362 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6363
6364 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6365 GET_MODE (SUBREG_REG (*loc)),
6366 SUBREG_BYTE (*loc));
6367 }
6368 }
6369
6370 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6371 what's inside and make a new rtl if so. */
6372 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6373 || GET_CODE (*loc) == MULT)
6374 {
6375 rtx x = find_replacement (&XEXP (*loc, 0));
6376 rtx y = find_replacement (&XEXP (*loc, 1));
6377
6378 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6379 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6380 }
6381
6382 return *loc;
6383 }
6384 \f
6385 /* Return nonzero if register in range [REGNO, ENDREGNO)
6386 appears either explicitly or implicitly in X
6387 other than being stored into (except for earlyclobber operands).
6388
6389 References contained within the substructure at LOC do not count.
6390 LOC may be zero, meaning don't ignore anything.
6391
6392 This is similar to refers_to_regno_p in rtlanal.c except that we
6393 look at equivalences for pseudos that didn't get hard registers. */
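/* For example (illustrative only, assuming reg 2 is a single-word hard
   register): with REGNO == 1 and ENDREGNO == 3, the result is nonzero
   for X == (plus:SI (reg:SI 2) (const_int 4)), but zero for
   X == (set (reg:SI 1) (const_int 0)) when LOC is null and reg 1 is not
   an earlyclobber operand, since a plain store into the range does not
   count as a reference.  */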
6394
6395 static int
6396 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6397 rtx x, rtx *loc)
6398 {
6399 int i;
6400 unsigned int r;
6401 RTX_CODE code;
6402 const char *fmt;
6403
6404 if (x == 0)
6405 return 0;
6406
6407 repeat:
6408 code = GET_CODE (x);
6409
6410 switch (code)
6411 {
6412 case REG:
6413 r = REGNO (x);
6414
6415 /* If this is a pseudo, it cannot have been allocated a hard register;
6416 X must therefore either be a constant or be in memory.  */
6417 if (r >= FIRST_PSEUDO_REGISTER)
6418 {
6419 if (reg_equiv_memory_loc (r))
6420 return refers_to_regno_for_reload_p (regno, endregno,
6421 reg_equiv_memory_loc (r),
6422 (rtx*) 0);
6423
6424 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6425 return 0;
6426 }
6427
6428 return endregno > r && regno < END_REGNO (x);
6429
6430 case SUBREG:
6431 /* If this is a SUBREG of a hard reg, we can see exactly which
6432 registers are being modified. Otherwise, handle normally. */
6433 if (REG_P (SUBREG_REG (x))
6434 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6435 {
6436 unsigned int inner_regno = subreg_regno (x);
6437 unsigned int inner_endregno
6438 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6439 ? subreg_nregs (x) : 1);
6440
6441 return endregno > inner_regno && regno < inner_endregno;
6442 }
6443 break;
6444
6445 case CLOBBER:
6446 case SET:
6447 if (&SET_DEST (x) != loc
6448 /* Note that setting a SUBREG counts as referring to the REG it is in
6449 for a pseudo, but not for hard registers, since we can
6450 treat each word individually.  */
6451 && ((GET_CODE (SET_DEST (x)) == SUBREG
6452 && loc != &SUBREG_REG (SET_DEST (x))
6453 && REG_P (SUBREG_REG (SET_DEST (x)))
6454 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6455 && refers_to_regno_for_reload_p (regno, endregno,
6456 SUBREG_REG (SET_DEST (x)),
6457 loc))
6458 /* If the output is an earlyclobber operand, this is
6459 a conflict. */
6460 || ((!REG_P (SET_DEST (x))
6461 || earlyclobber_operand_p (SET_DEST (x)))
6462 && refers_to_regno_for_reload_p (regno, endregno,
6463 SET_DEST (x), loc))))
6464 return 1;
6465
6466 if (code == CLOBBER || loc == &SET_SRC (x))
6467 return 0;
6468 x = SET_SRC (x);
6469 goto repeat;
6470
6471 default:
6472 break;
6473 }
6474
6475 /* X does not match, so try its subexpressions. */
6476
6477 fmt = GET_RTX_FORMAT (code);
6478 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6479 {
6480 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6481 {
6482 if (i == 0)
6483 {
6484 x = XEXP (x, 0);
6485 goto repeat;
6486 }
6487 else
6488 if (refers_to_regno_for_reload_p (regno, endregno,
6489 XEXP (x, i), loc))
6490 return 1;
6491 }
6492 else if (fmt[i] == 'E')
6493 {
6494 int j;
6495 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6496 if (loc != &XVECEXP (x, i, j)
6497 && refers_to_regno_for_reload_p (regno, endregno,
6498 XVECEXP (x, i, j), loc))
6499 return 1;
6500 }
6501 }
6502 return 0;
6503 }
6504
6505 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6506 we check if any register number in X conflicts with the relevant register
6507 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6508 contains a MEM (we don't bother checking for memory addresses that can't
6509 conflict because we expect this to be a rare case).
6510
6511 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6512 that we look at equivalences for pseudos that didn't get hard registers. */
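/* For example (illustrative only, assuming SImode fits in one hard
   register): if X is (reg:SI 3) and IN is
   (mem:SI (plus:SI (reg:SI 3) (const_int 8))), the result is 1.  If X
   is a pseudo that was spilled to memory, any MEM within IN counts as a
   potential conflict.  */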
6513
6514 int
6515 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6516 {
6517 int regno, endregno;
6518
6519 /* Overly conservative. */
6520 if (GET_CODE (x) == STRICT_LOW_PART
6521 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6522 x = XEXP (x, 0);
6523
6524 /* If either argument is a constant, then modifying X cannot affect IN.  */
6525 if (CONSTANT_P (x) || CONSTANT_P (in))
6526 return 0;
6527 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6528 return refers_to_mem_for_reload_p (in);
6529 else if (GET_CODE (x) == SUBREG)
6530 {
6531 regno = REGNO (SUBREG_REG (x));
6532 if (regno < FIRST_PSEUDO_REGISTER)
6533 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6534 GET_MODE (SUBREG_REG (x)),
6535 SUBREG_BYTE (x),
6536 GET_MODE (x));
6537 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6538 ? subreg_nregs (x) : 1);
6539
6540 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6541 }
6542 else if (REG_P (x))
6543 {
6544 regno = REGNO (x);
6545
6546 /* If this is a pseudo, it must not have been assigned a hard register.
6547 Therefore, it must either be in memory or be a constant. */
6548
6549 if (regno >= FIRST_PSEUDO_REGISTER)
6550 {
6551 if (reg_equiv_memory_loc (regno))
6552 return refers_to_mem_for_reload_p (in);
6553 gcc_assert (reg_equiv_constant (regno));
6554 return 0;
6555 }
6556
6557 endregno = END_REGNO (x);
6558
6559 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6560 }
6561 else if (MEM_P (x))
6562 return refers_to_mem_for_reload_p (in);
6563 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6564 || GET_CODE (x) == CC0)
6565 return reg_mentioned_p (x, in);
6566 else
6567 {
6568 gcc_assert (GET_CODE (x) == PLUS);
6569
6570 /* We actually want to know if X is mentioned somewhere inside IN.
6571 We must not say that (plus (sp) (const_int 124)) is in
6572 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6573 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6574 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6575 while (MEM_P (in))
6576 in = XEXP (in, 0);
6577 if (REG_P (in))
6578 return 0;
6579 else if (GET_CODE (in) == PLUS)
6580 return (rtx_equal_p (x, in)
6581 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6582 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6583 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6584 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6585 }
6586
6587 gcc_unreachable ();
6588 }
6589
6590 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6591 registers. */
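/* For example (illustrative only): a pseudo register that did not get a
   hard register but has a memory equivalence counts as containing a MEM,
   since reload may later rewrite it as that memory reference.  */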
6592
6593 static int
6594 refers_to_mem_for_reload_p (rtx x)
6595 {
6596 const char *fmt;
6597 int i;
6598
6599 if (MEM_P (x))
6600 return 1;
6601
6602 if (REG_P (x))
6603 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6604 && reg_equiv_memory_loc (REGNO (x)));
6605
6606 fmt = GET_RTX_FORMAT (GET_CODE (x));
6607 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6608 if (fmt[i] == 'e'
6609 && (MEM_P (XEXP (x, i))
6610 || refers_to_mem_for_reload_p (XEXP (x, i))))
6611 return 1;
6612
6613 return 0;
6614 }
6615 \f
6616 /* Check the insns before INSN to see if there is a suitable register
6617 containing the same value as GOAL.
6618 If OTHER is -1, look for a register in class RCLASS.
6619 Otherwise, just see if register number OTHER shares GOAL's value.
6620
6621 Return an rtx for the register found, or zero if none is found.
6622
6623 If RELOAD_REG_P is (short *)1,
6624 we reject any hard reg that appears in reload_reg_rtx
6625 because such a hard reg is also needed coming into this insn.
6626
6627 If RELOAD_REG_P is any other nonzero value,
6628 it is a vector indexed by hard reg number
6629 and we reject any hard reg whose element in the vector is nonnegative
6630 as well as any that appears in reload_reg_rtx.
6631
6632 If GOAL is zero, then GOALREG is a register number; we look
6633 for an equivalent for that register.
6634
6635 MODE is the machine mode of the value we want an equivalence for.
6636 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6637
6638 This function is used by jump.c as well as in the reload pass.
6639
6640 If GOAL is the sum of the stack pointer and a constant, we treat it
6641 as if it were a constant except that sp is required to be unchanging. */
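/* A usage sketch (illustrative only; try_existing_equiv is a
   hypothetical wrapper, not part of GCC): ask whether some register in
   GENERAL_REGS already holds the value of GOAL just before INSN,
   accepting any register (OTHER == -1) and with no extra rejections
   (RELOAD_REG_P == 0).  */
#if 0
static rtx
try_existing_equiv (rtx goal, rtx_insn *insn)
{
  return find_equiv_reg (goal, insn, GENERAL_REGS, -1,
			 (short *) 0, 0, GET_MODE (goal));
}
#endif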
6642
6643 rtx
6644 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6645 short *reload_reg_p, int goalreg, machine_mode mode)
6646 {
6647 rtx_insn *p = insn;
6648 rtx goaltry, valtry, value;
6649 rtx_insn *where;
6650 rtx pat;
6651 int regno = -1;
6652 int valueno;
6653 int goal_mem = 0;
6654 int goal_const = 0;
6655 int goal_mem_addr_varies = 0;
6656 int need_stable_sp = 0;
6657 int nregs;
6658 int valuenregs;
6659 int num = 0;
6660
6661 if (goal == 0)
6662 regno = goalreg;
6663 else if (REG_P (goal))
6664 regno = REGNO (goal);
6665 else if (MEM_P (goal))
6666 {
6667 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6668 if (MEM_VOLATILE_P (goal))
6669 return 0;
6670 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6671 return 0;
6672 /* An address with side effects must be reexecuted. */
6673 switch (code)
6674 {
6675 case POST_INC:
6676 case PRE_INC:
6677 case POST_DEC:
6678 case PRE_DEC:
6679 case POST_MODIFY:
6680 case PRE_MODIFY:
6681 return 0;
6682 default:
6683 break;
6684 }
6685 goal_mem = 1;
6686 }
6687 else if (CONSTANT_P (goal))
6688 goal_const = 1;
6689 else if (GET_CODE (goal) == PLUS
6690 && XEXP (goal, 0) == stack_pointer_rtx
6691 && CONSTANT_P (XEXP (goal, 1)))
6692 goal_const = need_stable_sp = 1;
6693 else if (GET_CODE (goal) == PLUS
6694 && XEXP (goal, 0) == frame_pointer_rtx
6695 && CONSTANT_P (XEXP (goal, 1)))
6696 goal_const = 1;
6697 else
6698 return 0;
6699
6700 num = 0;
6701 /* Scan insns back from INSN, looking for one that copies
6702 a value into or out of GOAL.
6703 Stop and give up if we reach a label. */
6704
6705 while (1)
6706 {
6707 p = PREV_INSN (p);
6708 if (p && DEBUG_INSN_P (p))
6709 continue;
6710 num++;
6711 if (p == 0 || LABEL_P (p)
6712 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6713 return 0;
6714
6715 /* Don't reuse register contents from before a setjmp-type
6716 function call; on the second return (from the longjmp) it
6717 might have been clobbered by a later reuse.  It doesn't
6718 seem worthwhile to go and check whether it is actually
6719 reused, even if that information were readily available;
6720 just don't reuse it across the setjmp call.  */
6721 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6722 return 0;
6723
6724 if (NONJUMP_INSN_P (p)
6725 /* If we don't want spill regs ... */
6726 && (! (reload_reg_p != 0
6727 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6728 /* ... then ignore insns introduced by reload; they aren't
6729 useful and can make the results in reload_as_needed differ
6730 from those used when calculating the need for
6731 spills.  If we notice an input-reload insn here, we will
6732 reject it below, but it might hide a usable equivalent.
6733 That makes bad code. It may even fail: perhaps no reg was
6734 spilled for this insn because it was assumed we would find
6735 that equivalent. */
6736 || INSN_UID (p) < reload_first_uid))
6737 {
6738 rtx tem;
6739 pat = single_set (p);
6740
6741 /* First check for something that sets some reg equal to GOAL. */
6742 if (pat != 0
6743 && ((regno >= 0
6744 && true_regnum (SET_SRC (pat)) == regno
6745 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6746 ||
6747 (regno >= 0
6748 && true_regnum (SET_DEST (pat)) == regno
6749 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6750 ||
6751 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6752 /* When looking for stack pointer + const,
6753 make sure we don't use a stack adjust. */
6754 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6755 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6756 || (goal_mem
6757 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6758 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6759 || (goal_mem
6760 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6761 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6762 /* If we are looking for a constant,
6763 and something equivalent to that constant was copied
6764 into a reg, we can use that reg. */
6765 || (goal_const && REG_NOTES (p) != 0
6766 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6767 && ((rtx_equal_p (XEXP (tem, 0), goal)
6768 && (valueno
6769 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6770 || (REG_P (SET_DEST (pat))
6771 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6772 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6773 && CONST_INT_P (goal)
6774 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6775 0, VOIDmode)) != 0
6776 && rtx_equal_p (goal, goaltry)
6777 && (valtry
6778 = operand_subword (SET_DEST (pat), 0, 0,
6779 VOIDmode))
6780 && (valueno = true_regnum (valtry)) >= 0)))
6781 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6782 NULL_RTX))
6783 && REG_P (SET_DEST (pat))
6784 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6785 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6786 && CONST_INT_P (goal)
6787 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6788 VOIDmode)) != 0
6789 && rtx_equal_p (goal, goaltry)
6790 && (valtry
6791 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6792 && (valueno = true_regnum (valtry)) >= 0)))
6793 {
6794 if (other >= 0)
6795 {
6796 if (valueno != other)
6797 continue;
6798 }
6799 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6800 continue;
6801 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6802 mode, valueno))
6803 continue;
6804 value = valtry;
6805 where = p;
6806 break;
6807 }
6808 }
6809 }
6810
6811 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6812 (or copying VALUE into GOAL, if GOAL is also a register).
6813 Now verify that VALUE is really valid. */
6814
6815 /* VALUENO is the register number of VALUE; a hard register. */
6816
6817 /* Don't try to reuse something that is killed in this insn.  We want
6818 to be able to trust REG_UNUSED notes. */
6819 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6820 return 0;
6821
6822 /* If we propose to get the value from the stack pointer or if GOAL is
6823 a MEM based on the stack pointer, we need a stable SP. */
6824 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6825 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6826 goal)))
6827 need_stable_sp = 1;
6828
6829 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6830 if (GET_MODE (value) != mode)
6831 return 0;
6832
6833 /* Reject VALUE if it was loaded from GOAL
6834 and is also a register that appears in the address of GOAL. */
6835
6836 if (goal_mem && value == SET_DEST (single_set (where))
6837 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6838 goal, (rtx*) 0))
6839 return 0;
6840
6841 /* Reject registers that overlap GOAL. */
6842
6843 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6844 nregs = hard_regno_nregs (regno, mode);
6845 else
6846 nregs = 1;
6847 valuenregs = hard_regno_nregs (valueno, mode);
6848
6849 if (!goal_mem && !goal_const
6850 && regno + nregs > valueno && regno < valueno + valuenregs)
6851 return 0;
6852
6853 /* Reject VALUE if it is one of the regs reserved for reloads.
6854 Reload1 knows how to reuse them anyway, and it would get
6855 confused if we allocated one without its knowledge.
6856 (Now that insns introduced by reload are ignored above,
6857 this case shouldn't happen, but I'm not positive.) */
6858
6859 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6860 {
6861 int i;
6862 for (i = 0; i < valuenregs; ++i)
6863 if (reload_reg_p[valueno + i] >= 0)
6864 return 0;
6865 }
6866
6867 /* Reject VALUE if it is a register being used for an input reload
6868 even if it is not one of those reserved. */
6869
6870 if (reload_reg_p != 0)
6871 {
6872 int i;
6873 for (i = 0; i < n_reloads; i++)
6874 if (rld[i].reg_rtx != 0
6875 && rld[i].in
6876 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6877 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6878 return 0;
6879 }
6880
6881 if (goal_mem)
6882 /* We must treat the frame pointer as varying here,
6883 since it can vary, as in a nonlocal goto generated by expand_goto.  */
6884 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6885
6886 /* Now verify that the values of GOAL and VALUE remain unaltered
6887 until INSN is reached. */
6888
6889 p = insn;
6890 while (1)
6891 {
6892 p = PREV_INSN (p);
6893 if (p == where)
6894 return value;
6895
6896 /* Don't trust the conversion past a function call
6897 if either of the two is in a call-clobbered register or in memory.  */
6898 if (CALL_P (p))
6899 {
6900 int i;
6901
6902 if (goal_mem || need_stable_sp)
6903 return 0;
6904
6905 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6906 for (i = 0; i < nregs; ++i)
6907 if (call_used_regs[regno + i]
6908 || targetm.hard_regno_call_part_clobbered (regno + i, mode))
6909 return 0;
6910
6911 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6912 for (i = 0; i < valuenregs; ++i)
6913 if (call_used_regs[valueno + i]
6914 || targetm.hard_regno_call_part_clobbered (valueno + i,
6915 mode))
6916 return 0;
6917 }
6918
6919 if (INSN_P (p))
6920 {
6921 pat = PATTERN (p);
6922
6923 /* Watch out for unspec_volatile, and volatile asms. */
6924 if (volatile_insn_p (pat))
6925 return 0;
6926
6927 /* If this insn P stores in either GOAL or VALUE, return 0.
6928 If GOAL is a memory ref and this insn writes memory, return 0.
6929 If GOAL is a memory ref and its address is not constant,
6930 and this insn P changes a register used in GOAL, return 0. */
6931
6932 if (GET_CODE (pat) == COND_EXEC)
6933 pat = COND_EXEC_CODE (pat);
6934 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6935 {
6936 rtx dest = SET_DEST (pat);
6937 while (GET_CODE (dest) == SUBREG
6938 || GET_CODE (dest) == ZERO_EXTRACT
6939 || GET_CODE (dest) == STRICT_LOW_PART)
6940 dest = XEXP (dest, 0);
6941 if (REG_P (dest))
6942 {
6943 int xregno = REGNO (dest);
6944 int end_xregno = END_REGNO (dest);
6945 if (xregno < regno + nregs && end_xregno > regno)
6946 return 0;
6947 if (xregno < valueno + valuenregs
6948 && end_xregno > valueno)
6949 return 0;
6950 if (goal_mem_addr_varies
6951 && reg_overlap_mentioned_for_reload_p (dest, goal))
6952 return 0;
6953 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6954 return 0;
6955 }
6956 else if (goal_mem && MEM_P (dest)
6957 && ! push_operand (dest, GET_MODE (dest)))
6958 return 0;
6959 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6960 && reg_equiv_memory_loc (regno) != 0)
6961 return 0;
6962 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6963 return 0;
6964 }
6965 else if (GET_CODE (pat) == PARALLEL)
6966 {
6967 int i;
6968 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6969 {
6970 rtx v1 = XVECEXP (pat, 0, i);
6971 if (GET_CODE (v1) == COND_EXEC)
6972 v1 = COND_EXEC_CODE (v1);
6973 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6974 {
6975 rtx dest = SET_DEST (v1);
6976 while (GET_CODE (dest) == SUBREG
6977 || GET_CODE (dest) == ZERO_EXTRACT
6978 || GET_CODE (dest) == STRICT_LOW_PART)
6979 dest = XEXP (dest, 0);
6980 if (REG_P (dest))
6981 {
6982 int xregno = REGNO (dest);
6983 int end_xregno = END_REGNO (dest);
6984 if (xregno < regno + nregs
6985 && end_xregno > regno)
6986 return 0;
6987 if (xregno < valueno + valuenregs
6988 && end_xregno > valueno)
6989 return 0;
6990 if (goal_mem_addr_varies
6991 && reg_overlap_mentioned_for_reload_p (dest,
6992 goal))
6993 return 0;
6994 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6995 return 0;
6996 }
6997 else if (goal_mem && MEM_P (dest)
6998 && ! push_operand (dest, GET_MODE (dest)))
6999 return 0;
7000 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7001 && reg_equiv_memory_loc (regno) != 0)
7002 return 0;
7003 else if (need_stable_sp
7004 && push_operand (dest, GET_MODE (dest)))
7005 return 0;
7006 }
7007 }
7008 }
7009
7010 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7011 {
7012 rtx link;
7013
7014 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7015 link = XEXP (link, 1))
7016 {
7017 pat = XEXP (link, 0);
7018 if (GET_CODE (pat) == CLOBBER)
7019 {
7020 rtx dest = SET_DEST (pat);
7021
7022 if (REG_P (dest))
7023 {
7024 int xregno = REGNO (dest);
7025 int end_xregno = END_REGNO (dest);
7026
7027 if (xregno < regno + nregs
7028 && end_xregno > regno)
7029 return 0;
7030 else if (xregno < valueno + valuenregs
7031 && end_xregno > valueno)
7032 return 0;
7033 else if (goal_mem_addr_varies
7034 && reg_overlap_mentioned_for_reload_p (dest,
7035 goal))
7036 return 0;
7037 }
7038
7039 else if (goal_mem && MEM_P (dest)
7040 && ! push_operand (dest, GET_MODE (dest)))
7041 return 0;
7042 else if (need_stable_sp
7043 && push_operand (dest, GET_MODE (dest)))
7044 return 0;
7045 }
7046 }
7047 }
7048
7049 #if AUTO_INC_DEC
7050 /* If this insn auto-increments or auto-decrements
7051 either regno or valueno, return 0 now.
7052 If GOAL is a memory ref and its address is not constant,
7053 and this insn P increments a register used in GOAL, return 0. */
7054 {
7055 rtx link;
7056
7057 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7058 if (REG_NOTE_KIND (link) == REG_INC
7059 && REG_P (XEXP (link, 0)))
7060 {
7061 int incno = REGNO (XEXP (link, 0));
7062 if (incno < regno + nregs && incno >= regno)
7063 return 0;
7064 if (incno < valueno + valuenregs && incno >= valueno)
7065 return 0;
7066 if (goal_mem_addr_varies
7067 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7068 goal))
7069 return 0;
7070 }
7071 }
7072 #endif
7073 }
7074 }
7075 }
7076 \f
7077 /* Find a place where INCED appears in an increment or decrement operator
7078 within X, and return the amount INCED is incremented or decremented by.
7079 The value is always positive. */
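/* For example (illustrative only, on a target where SImode is four bytes
   wide): if X contains (mem:SI (post_inc (reg 5))) and INCED is (reg 5),
   the result is 4.  For a PRE_MODIFY or POST_MODIFY whose second operand
   is (plus (reg 5) (const_int -12)), the result is 12.  */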
7080
7081 static poly_int64
7082 find_inc_amount (rtx x, rtx inced)
7083 {
7084 enum rtx_code code = GET_CODE (x);
7085 const char *fmt;
7086 int i;
7087
7088 if (code == MEM)
7089 {
7090 rtx addr = XEXP (x, 0);
7091 if ((GET_CODE (addr) == PRE_DEC
7092 || GET_CODE (addr) == POST_DEC
7093 || GET_CODE (addr) == PRE_INC
7094 || GET_CODE (addr) == POST_INC)
7095 && XEXP (addr, 0) == inced)
7096 return GET_MODE_SIZE (GET_MODE (x));
7097 else if ((GET_CODE (addr) == PRE_MODIFY
7098 || GET_CODE (addr) == POST_MODIFY)
7099 && GET_CODE (XEXP (addr, 1)) == PLUS
7100 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7101 && XEXP (addr, 0) == inced
7102 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7103 {
7104 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7105 return i < 0 ? -i : i;
7106 }
7107 }
7108
7109 fmt = GET_RTX_FORMAT (code);
7110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7111 {
7112 if (fmt[i] == 'e')
7113 {
7114 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7115 if (maybe_ne (tem, 0))
7116 return tem;
7117 }
7118 if (fmt[i] == 'E')
7119 {
7120 int j;
7121 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7122 {
7123 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7124 if (maybe_ne (tem, 0))
7125 return tem;
7126 }
7127 }
7128 }
7129
7130 return 0;
7131 }
7132 \f
7133 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7134 of a REG_INC note in insn INSN.  REGNO must refer to a hard register.  */
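/* For example (illustrative only): an insn that auto-increments
   (reg:SI 2) carries an (expr_list:REG_INC (reg:SI 2) ...) note, so a
   query with REGNO == 2 and ENDREGNO == 3 returns 1, provided
   AUTO_INC_DEC is enabled for the target.  */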
7135
7136 static int
7137 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7138 rtx insn)
7139 {
7140 rtx link;
7141
7142 if (!AUTO_INC_DEC)
7143 return 0;
7144
7145 gcc_assert (insn);
7146
7147 if (! INSN_P (insn))
7148 return 0;
7149
7150 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7151 if (REG_NOTE_KIND (link) == REG_INC)
7152 {
7153 unsigned int test = (int) REGNO (XEXP (link, 0));
7154 if (test >= regno && test < endregno)
7155 return 1;
7156 }
7157 return 0;
7158 }
7159
7160 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7161 If SETS is 1, also consider SETs.  If SETS is 2, also check
7162 REG_INC notes.  REGNO must refer to a hard register.  */
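/* For example (illustrative only, assuming SImode needs one hard
   register): for an insn whose pattern is
   (parallel [(set ...) (clobber (reg:SI 1))]), the result is 1 for
   REGNO == 1 even with SETS == 0, while a plain (set (reg:SI 1) ...)
   pattern is reported only when SETS is 1.  */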
7163
7164 int
7165 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7166 int sets)
7167 {
7168 /* regno must be a hard register. */
7169 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7170
7171 unsigned int endregno = end_hard_regno (mode, regno);
7172
7173 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7174 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7175 && REG_P (XEXP (PATTERN (insn), 0)))
7176 {
7177 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7178
7179 return test >= regno && test < endregno;
7180 }
7181
7182 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7183 return 1;
7184
7185 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7186 {
7187 int i = XVECLEN (PATTERN (insn), 0) - 1;
7188
7189 for (; i >= 0; i--)
7190 {
7191 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7192 if ((GET_CODE (elt) == CLOBBER
7193 || (sets == 1 && GET_CODE (elt) == SET))
7194 && REG_P (XEXP (elt, 0)))
7195 {
7196 unsigned int test = REGNO (XEXP (elt, 0));
7197
7198 if (test >= regno && test < endregno)
7199 return 1;
7200 }
7201 if (sets == 2
7202 && reg_inc_found_and_valid_p (regno, endregno, elt))
7203 return 1;
7204 }
7205 }
7206
7207 return 0;
7208 }
7209
7210 /* Find the low part, with mode MODE, of the hard register RELOADREG.  */
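/* For example (illustrative only, assuming DImode needs two hard
   registers and SImode one): on a REG_WORDS_BIG_ENDIAN target where
   (reg:DI 8) occupies hard registers 8 and 9, its SImode low part is
   (reg:SI 9), since the least significant word lives in the
   higher-numbered register.  */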
7211 rtx
7212 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7213 {
7214 int regno;
7215
7216 if (GET_MODE (reloadreg) == mode)
7217 return reloadreg;
7218
7219 regno = REGNO (reloadreg);
7220
7221 if (REG_WORDS_BIG_ENDIAN)
7222 regno += ((int) REG_NREGS (reloadreg)
7223 - (int) hard_regno_nregs (regno, mode));
7224
7225 return gen_rtx_REG (mode, regno);
7226 }
7227
7228 static const char *const reload_when_needed_name[] =
7229 {
7230 "RELOAD_FOR_INPUT",
7231 "RELOAD_FOR_OUTPUT",
7232 "RELOAD_FOR_INSN",
7233 "RELOAD_FOR_INPUT_ADDRESS",
7234 "RELOAD_FOR_INPADDR_ADDRESS",
7235 "RELOAD_FOR_OUTPUT_ADDRESS",
7236 "RELOAD_FOR_OUTADDR_ADDRESS",
7237 "RELOAD_FOR_OPERAND_ADDRESS",
7238 "RELOAD_FOR_OPADDR_ADDR",
7239 "RELOAD_OTHER",
7240 "RELOAD_FOR_OTHER_ADDRESS"
7241 };
7242
7243 /* These functions are used to print the variables set by 'find_reloads'.  */
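/* They are typically invoked by hand from the debugger, e.g.
   "call debug_reload ()" under gdb while stopped inside find_reloads;
   illustrative only, any debugger that can call functions will do.  */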
7244
7245 DEBUG_FUNCTION void
7246 debug_reload_to_stream (FILE *f)
7247 {
7248 int r;
7249 const char *prefix;
7250
7251 if (! f)
7252 f = stderr;
7253 for (r = 0; r < n_reloads; r++)
7254 {
7255 fprintf (f, "Reload %d: ", r);
7256
7257 if (rld[r].in != 0)
7258 {
7259 fprintf (f, "reload_in (%s) = ",
7260 GET_MODE_NAME (rld[r].inmode));
7261 print_inline_rtx (f, rld[r].in, 24);
7262 fprintf (f, "\n\t");
7263 }
7264
7265 if (rld[r].out != 0)
7266 {
7267 fprintf (f, "reload_out (%s) = ",
7268 GET_MODE_NAME (rld[r].outmode));
7269 print_inline_rtx (f, rld[r].out, 24);
7270 fprintf (f, "\n\t");
7271 }
7272
7273 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7274
7275 fprintf (f, "%s (opnum = %d)",
7276 reload_when_needed_name[(int) rld[r].when_needed],
7277 rld[r].opnum);
7278
7279 if (rld[r].optional)
7280 fprintf (f, ", optional");
7281
7282 if (rld[r].nongroup)
7283 fprintf (f, ", nongroup");
7284
7285 if (maybe_ne (rld[r].inc, 0))
7286 {
7287 fprintf (f, ", inc by ");
7288 print_dec (rld[r].inc, f, SIGNED);
7289 }
7290
7291 if (rld[r].nocombine)
7292 fprintf (f, ", can't combine");
7293
7294 if (rld[r].secondary_p)
7295 fprintf (f, ", secondary_reload_p");
7296
7297 if (rld[r].in_reg != 0)
7298 {
7299 fprintf (f, "\n\treload_in_reg: ");
7300 print_inline_rtx (f, rld[r].in_reg, 24);
7301 }
7302
7303 if (rld[r].out_reg != 0)
7304 {
7305 fprintf (f, "\n\treload_out_reg: ");
7306 print_inline_rtx (f, rld[r].out_reg, 24);
7307 }
7308
7309 if (rld[r].reg_rtx != 0)
7310 {
7311 fprintf (f, "\n\treload_reg_rtx: ");
7312 print_inline_rtx (f, rld[r].reg_rtx, 24);
7313 }
7314
7315 prefix = "\n\t";
7316 if (rld[r].secondary_in_reload != -1)
7317 {
7318 fprintf (f, "%ssecondary_in_reload = %d",
7319 prefix, rld[r].secondary_in_reload);
7320 prefix = ", ";
7321 }
7322
7323 if (rld[r].secondary_out_reload != -1)
7324 fprintf (f, "%ssecondary_out_reload = %d\n",
7325 prefix, rld[r].secondary_out_reload);
7326
7327 prefix = "\n\t";
7328 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7329 {
7330 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7331 insn_data[rld[r].secondary_in_icode].name);
7332 prefix = ", ";
7333 }
7334
7335 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7336 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7337 insn_data[rld[r].secondary_out_icode].name);
7338
7339 fprintf (f, "\n");
7340 }
7341 }
7342
7343 DEBUG_FUNCTION void
7344 debug_reload (void)
7345 {
7346 debug_reload_to_stream (stderr);
7347 }