]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/reload.c
2015-10-29 Andrew MacLeod <amacleod@redhat.com>
[thirdparty/gcc.git] / gcc / reload.c
1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33 2. optionally record the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "tm_p.h"
100 #include "expmed.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "flags.h"
107 #include "alias.h"
108 #include "dojump.h"
109 #include "explow.h"
110 #include "calls.h"
111 #include "varasm.h"
112 #include "stmt.h"
113 #include "expr.h"
114 #include "reload.h"
115 #include "addresses.h"
116 #include "params.h"
117
118 /* True if X is a constant that can be forced into the constant pool.
119 MODE is the mode of the operand, or VOIDmode if not known. */
120 #define CONST_POOL_OK_P(MODE, X) \
121 ((MODE) != VOIDmode \
122 && CONSTANT_P (X) \
123 && GET_CODE (X) != HIGH \
124 && !targetm.cannot_force_const_mem (MODE, X))
125
126 /* True if C is a non-empty register class that has too few registers
127 to be safely used as a reload target class. */
128
129 static inline bool
130 small_register_class_p (reg_class_t rclass)
131 {
132 return (reg_class_size [(int) rclass] == 1
133 || (reg_class_size [(int) rclass] >= 1
134 && targetm.class_likely_spilled_p (rclass)));
135 }
136
137 \f
138 /* All reloads of the current insn are recorded here. See reload.h for
139 comments. */
140 int n_reloads;
141 struct reload rld[MAX_RELOADS];
142
143 /* All the "earlyclobber" operands of the current insn
144 are recorded here. */
145 int n_earlyclobbers;
146 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
147
148 int reload_n_operands;
149
150 /* Replacing reloads.
151
152 If `replace_reloads' is nonzero, then as each reload is recorded
153 an entry is made for it in the table `replacements'.
154 Then later `subst_reloads' can look through that table and
155 perform all the replacements needed. */
156
157 /* Nonzero means record the places to replace. */
158 static int replace_reloads;
159
160 /* Each replacement is recorded with a structure like this. */
161 struct replacement
162 {
163 rtx *where; /* Location to store in */
164 int what; /* which reload this is for */
165 machine_mode mode; /* mode it must have */
166 };
167
168 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
169
170 /* Number of replacements currently recorded. */
171 static int n_replacements;
172
173 /* Used to track what is modified by an operand. */
174 struct decomposition
175 {
176 int reg_flag; /* Nonzero if referencing a register. */
177 int safe; /* Nonzero if this can't conflict with anything. */
178 rtx base; /* Base address for MEM. */
179 HOST_WIDE_INT start; /* Starting offset or register number. */
180 HOST_WIDE_INT end; /* Ending offset or register number. */
181 };
182
183 #ifdef SECONDARY_MEMORY_NEEDED
184
185 /* Save MEMs needed to copy from one class of registers to another. One MEM
186 is used per mode, but normally only one or two modes are ever used.
187
188 We keep two versions, before and after register elimination. The one
189 after register elimination is record separately for each operand. This
190 is done in case the address is not valid to be sure that we separately
191 reload each. */
192
193 static rtx secondary_memlocs[NUM_MACHINE_MODES];
194 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
195 static int secondary_memlocs_elim_used = 0;
196 #endif
197
198 /* The instruction we are doing reloads for;
199 so we can test whether a register dies in it. */
200 static rtx_insn *this_insn;
201
202 /* Nonzero if this instruction is a user-specified asm with operands. */
203 static int this_insn_is_asm;
204
205 /* If hard_regs_live_known is nonzero,
206 we can tell which hard regs are currently live,
207 at least enough to succeed in choosing dummy reloads. */
208 static int hard_regs_live_known;
209
210 /* Indexed by hard reg number,
211 element is nonnegative if hard reg has been spilled.
212 This vector is passed to `find_reloads' as an argument
213 and is not changed here. */
214 static short *static_reload_reg_p;
215
216 /* Set to 1 in subst_reg_equivs if it changes anything. */
217 static int subst_reg_equivs_changed;
218
219 /* On return from push_reload, holds the reload-number for the OUT
220 operand, which can be different for that from the input operand. */
221 static int output_reloadnum;
222
223 /* Compare two RTX's. */
224 #define MATCHES(x, y) \
225 (x == y || (x != 0 && (REG_P (x) \
226 ? REG_P (y) && REGNO (x) == REGNO (y) \
227 : rtx_equal_p (x, y) && ! side_effects_p (x))))
228
229 /* Indicates if two reloads purposes are for similar enough things that we
230 can merge their reloads. */
231 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
232 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
233 || ((when1) == (when2) && (op1) == (op2)) \
234 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
235 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
236 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
237 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
238 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
239
240 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
241 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
242 ((when1) != (when2) \
243 || ! ((op1) == (op2) \
244 || (when1) == RELOAD_FOR_INPUT \
245 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
246 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
247
248 /* If we are going to reload an address, compute the reload type to
249 use. */
250 #define ADDR_TYPE(type) \
251 ((type) == RELOAD_FOR_INPUT_ADDRESS \
252 ? RELOAD_FOR_INPADDR_ADDRESS \
253 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
254 ? RELOAD_FOR_OUTADDR_ADDRESS \
255 : (type)))
256
257 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
258 machine_mode, enum reload_type,
259 enum insn_code *, secondary_reload_info *);
260 static enum reg_class find_valid_class (machine_mode, machine_mode,
261 int, unsigned int);
262 static void push_replacement (rtx *, int, machine_mode);
263 static void dup_replacements (rtx *, rtx *);
264 static void combine_reloads (void);
265 static int find_reusable_reload (rtx *, rtx, enum reg_class,
266 enum reload_type, int, int);
267 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
268 machine_mode, reg_class_t, int, int);
269 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
270 static struct decomposition decompose (rtx);
271 static int immune_p (rtx, rtx, struct decomposition);
272 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
273 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
274 rtx_insn *, int *);
275 static rtx make_memloc (rtx, int);
276 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
277 addr_space_t, rtx *);
278 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
279 int, enum reload_type, int, rtx_insn *);
280 static rtx subst_reg_equivs (rtx, rtx_insn *);
281 static rtx subst_indexed_address (rtx);
282 static void update_auto_inc_notes (rtx_insn *, int, int);
283 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
284 enum rtx_code, enum rtx_code, rtx *,
285 int, enum reload_type,int, rtx_insn *);
286 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
287 machine_mode, int,
288 enum reload_type, int);
289 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
290 int, rtx_insn *, int *);
291 static void copy_replacements_1 (rtx *, rtx *, int);
292 static int find_inc_amount (rtx, rtx);
293 static int refers_to_mem_for_reload_p (rtx);
294 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
295 rtx, rtx *);
296
297 /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the
298 list yet. */
299
300 static void
301 push_reg_equiv_alt_mem (int regno, rtx mem)
302 {
303 rtx it;
304
305 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
306 if (rtx_equal_p (XEXP (it, 0), mem))
307 return;
308
309 reg_equiv_alt_mem_list (regno)
310 = alloc_EXPR_LIST (REG_EQUIV, mem,
311 reg_equiv_alt_mem_list (regno));
312 }
313 \f
314 /* Determine if any secondary reloads are needed for loading (if IN_P is
315 nonzero) or storing (if IN_P is zero) X to or from a reload register of
316 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
317 are needed, push them.
318
319 Return the reload number of the secondary reload we made, or -1 if
320 we didn't need one. *PICODE is set to the insn_code to use if we do
321 need a secondary reload. */
322
323 static int
324 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
325 enum reg_class reload_class,
326 machine_mode reload_mode, enum reload_type type,
327 enum insn_code *picode, secondary_reload_info *prev_sri)
328 {
329 enum reg_class rclass = NO_REGS;
330 enum reg_class scratch_class;
331 machine_mode mode = reload_mode;
332 enum insn_code icode = CODE_FOR_nothing;
333 enum insn_code t_icode = CODE_FOR_nothing;
334 enum reload_type secondary_type;
335 int s_reload, t_reload = -1;
336 const char *scratch_constraint;
337 secondary_reload_info sri;
338
339 if (type == RELOAD_FOR_INPUT_ADDRESS
340 || type == RELOAD_FOR_OUTPUT_ADDRESS
341 || type == RELOAD_FOR_INPADDR_ADDRESS
342 || type == RELOAD_FOR_OUTADDR_ADDRESS)
343 secondary_type = type;
344 else
345 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
346
347 *picode = CODE_FOR_nothing;
348
349 /* If X is a paradoxical SUBREG, use the inner value to determine both the
350 mode and object being reloaded. */
351 if (paradoxical_subreg_p (x))
352 {
353 x = SUBREG_REG (x);
354 reload_mode = GET_MODE (x);
355 }
356
357 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
358 is still a pseudo-register by now, it *must* have an equivalent MEM
359 but we don't want to assume that), use that equivalent when seeing if
360 a secondary reload is needed since whether or not a reload is needed
361 might be sensitive to the form of the MEM. */
362
363 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
364 && reg_equiv_mem (REGNO (x)))
365 x = reg_equiv_mem (REGNO (x));
366
367 sri.icode = CODE_FOR_nothing;
368 sri.prev_sri = prev_sri;
369 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
370 reload_mode, &sri);
371 icode = (enum insn_code) sri.icode;
372
373 /* If we don't need any secondary registers, done. */
374 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
375 return -1;
376
377 if (rclass != NO_REGS)
378 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
379 reload_mode, type, &t_icode, &sri);
380
381 /* If we will be using an insn, the secondary reload is for a
382 scratch register. */
383
384 if (icode != CODE_FOR_nothing)
385 {
386 /* If IN_P is nonzero, the reload register will be the output in
387 operand 0. If IN_P is zero, the reload register will be the input
388 in operand 1. Outputs should have an initial "=", which we must
389 skip. */
390
391 /* ??? It would be useful to be able to handle only two, or more than
392 three, operands, but for now we can only handle the case of having
393 exactly three: output, input and one temp/scratch. */
394 gcc_assert (insn_data[(int) icode].n_operands == 3);
395
396 /* ??? We currently have no way to represent a reload that needs
397 an icode to reload from an intermediate tertiary reload register.
398 We should probably have a new field in struct reload to tag a
399 chain of scratch operand reloads onto. */
400 gcc_assert (rclass == NO_REGS);
401
402 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
403 gcc_assert (*scratch_constraint == '=');
404 scratch_constraint++;
405 if (*scratch_constraint == '&')
406 scratch_constraint++;
407 scratch_class = (reg_class_for_constraint
408 (lookup_constraint (scratch_constraint)));
409
410 rclass = scratch_class;
411 mode = insn_data[(int) icode].operand[2].mode;
412 }
413
414 /* This case isn't valid, so fail. Reload is allowed to use the same
415 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
416 in the case of a secondary register, we actually need two different
417 registers for correct code. We fail here to prevent the possibility of
418 silently generating incorrect code later.
419
420 The convention is that secondary input reloads are valid only if the
421 secondary_class is different from class. If you have such a case, you
422 can not use secondary reloads, you must work around the problem some
423 other way.
424
425 Allow this when a reload_in/out pattern is being used. I.e. assume
426 that the generated code handles this case. */
427
428 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
429 || t_icode != CODE_FOR_nothing);
430
431 /* See if we can reuse an existing secondary reload. */
432 for (s_reload = 0; s_reload < n_reloads; s_reload++)
433 if (rld[s_reload].secondary_p
434 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
435 || reg_class_subset_p (rld[s_reload].rclass, rclass))
436 && ((in_p && rld[s_reload].inmode == mode)
437 || (! in_p && rld[s_reload].outmode == mode))
438 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
439 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
440 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
441 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
442 && (small_register_class_p (rclass)
443 || targetm.small_register_classes_for_mode_p (VOIDmode))
444 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
445 opnum, rld[s_reload].opnum))
446 {
447 if (in_p)
448 rld[s_reload].inmode = mode;
449 if (! in_p)
450 rld[s_reload].outmode = mode;
451
452 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
453 rld[s_reload].rclass = rclass;
454
455 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
456 rld[s_reload].optional &= optional;
457 rld[s_reload].secondary_p = 1;
458 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
459 opnum, rld[s_reload].opnum))
460 rld[s_reload].when_needed = RELOAD_OTHER;
461
462 break;
463 }
464
465 if (s_reload == n_reloads)
466 {
467 #ifdef SECONDARY_MEMORY_NEEDED
468 /* If we need a memory location to copy between the two reload regs,
469 set it up now. Note that we do the input case before making
470 the reload and the output case after. This is due to the
471 way reloads are output. */
472
473 if (in_p && icode == CODE_FOR_nothing
474 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
475 {
476 get_secondary_mem (x, reload_mode, opnum, type);
477
478 /* We may have just added new reloads. Make sure we add
479 the new reload at the end. */
480 s_reload = n_reloads;
481 }
482 #endif
483
484 /* We need to make a new secondary reload for this register class. */
485 rld[s_reload].in = rld[s_reload].out = 0;
486 rld[s_reload].rclass = rclass;
487
488 rld[s_reload].inmode = in_p ? mode : VOIDmode;
489 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
490 rld[s_reload].reg_rtx = 0;
491 rld[s_reload].optional = optional;
492 rld[s_reload].inc = 0;
493 /* Maybe we could combine these, but it seems too tricky. */
494 rld[s_reload].nocombine = 1;
495 rld[s_reload].in_reg = 0;
496 rld[s_reload].out_reg = 0;
497 rld[s_reload].opnum = opnum;
498 rld[s_reload].when_needed = secondary_type;
499 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
500 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
501 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
502 rld[s_reload].secondary_out_icode
503 = ! in_p ? t_icode : CODE_FOR_nothing;
504 rld[s_reload].secondary_p = 1;
505
506 n_reloads++;
507
508 #ifdef SECONDARY_MEMORY_NEEDED
509 if (! in_p && icode == CODE_FOR_nothing
510 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
511 get_secondary_mem (x, mode, opnum, type);
512 #endif
513 }
514
515 *picode = icode;
516 return s_reload;
517 }
518
519 /* If a secondary reload is needed, return its class. If both an intermediate
520 register and a scratch register is needed, we return the class of the
521 intermediate register. */
522 reg_class_t
523 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
524 rtx x)
525 {
526 enum insn_code icode;
527 secondary_reload_info sri;
528
529 sri.icode = CODE_FOR_nothing;
530 sri.prev_sri = NULL;
531 rclass
532 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
533 icode = (enum insn_code) sri.icode;
534
535 /* If there are no secondary reloads at all, we return NO_REGS.
536 If an intermediate register is needed, we return its class. */
537 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
538 return rclass;
539
540 /* No intermediate register is needed, but we have a special reload
541 pattern, which we assume for now needs a scratch register. */
542 return scratch_reload_class (icode);
543 }
544
545 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
546 three operands, verify that operand 2 is an output operand, and return
547 its register class.
548 ??? We'd like to be able to handle any pattern with at least 2 operands,
549 for zero or more scratch registers, but that needs more infrastructure. */
550 enum reg_class
551 scratch_reload_class (enum insn_code icode)
552 {
553 const char *scratch_constraint;
554 enum reg_class rclass;
555
556 gcc_assert (insn_data[(int) icode].n_operands == 3);
557 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
558 gcc_assert (*scratch_constraint == '=');
559 scratch_constraint++;
560 if (*scratch_constraint == '&')
561 scratch_constraint++;
562 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
563 gcc_assert (rclass != NO_REGS);
564 return rclass;
565 }
566 \f
567 #ifdef SECONDARY_MEMORY_NEEDED
568
569 /* Return a memory location that will be used to copy X in mode MODE.
570 If we haven't already made a location for this mode in this insn,
571 call find_reloads_address on the location being returned. */
572
573 rtx
574 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
575 int opnum, enum reload_type type)
576 {
577 rtx loc;
578 int mem_valid;
579
580 /* By default, if MODE is narrower than a word, widen it to a word.
581 This is required because most machines that require these memory
582 locations do not support short load and stores from all registers
583 (e.g., FP registers). */
584
585 #ifdef SECONDARY_MEMORY_NEEDED_MODE
586 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
587 #else
588 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
589 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
590 #endif
591
592 /* If we already have made a MEM for this operand in MODE, return it. */
593 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
594 return secondary_memlocs_elim[(int) mode][opnum];
595
596 /* If this is the first time we've tried to get a MEM for this mode,
597 allocate a new one. `something_changed' in reload will get set
598 by noticing that the frame size has changed. */
599
600 if (secondary_memlocs[(int) mode] == 0)
601 {
602 #ifdef SECONDARY_MEMORY_NEEDED_RTX
603 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
604 #else
605 secondary_memlocs[(int) mode]
606 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
607 #endif
608 }
609
610 /* Get a version of the address doing any eliminations needed. If that
611 didn't give us a new MEM, make a new one if it isn't valid. */
612
613 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
614 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
615 MEM_ADDR_SPACE (loc));
616
617 if (! mem_valid && loc == secondary_memlocs[(int) mode])
618 loc = copy_rtx (loc);
619
620 /* The only time the call below will do anything is if the stack
621 offset is too large. In that case IND_LEVELS doesn't matter, so we
622 can just pass a zero. Adjust the type to be the address of the
623 corresponding object. If the address was valid, save the eliminated
624 address. If it wasn't valid, we need to make a reload each time, so
625 don't save it. */
626
627 if (! mem_valid)
628 {
629 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
630 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
631 : RELOAD_OTHER);
632
633 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
634 opnum, type, 0, 0);
635 }
636
637 secondary_memlocs_elim[(int) mode][opnum] = loc;
638 if (secondary_memlocs_elim_used <= (int)mode)
639 secondary_memlocs_elim_used = (int)mode + 1;
640 return loc;
641 }
642
643 /* Clear any secondary memory locations we've made. */
644
645 void
646 clear_secondary_mem (void)
647 {
648 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
649 }
650 #endif /* SECONDARY_MEMORY_NEEDED */
651 \f
652
653 /* Find the largest class which has at least one register valid in
654 mode INNER, and which for every such register, that register number
655 plus N is also valid in OUTER (if in range) and is cheap to move
656 into REGNO. Such a class must exist. */
657
658 static enum reg_class
659 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
660 machine_mode inner ATTRIBUTE_UNUSED, int n,
661 unsigned int dest_regno ATTRIBUTE_UNUSED)
662 {
663 int best_cost = -1;
664 int rclass;
665 int regno;
666 enum reg_class best_class = NO_REGS;
667 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
668 unsigned int best_size = 0;
669 int cost;
670
671 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
672 {
673 int bad = 0;
674 int good = 0;
675 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
676 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
677 {
678 if (HARD_REGNO_MODE_OK (regno, inner))
679 {
680 good = 1;
681 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
682 && ! HARD_REGNO_MODE_OK (regno + n, outer))
683 bad = 1;
684 }
685 }
686
687 if (bad || !good)
688 continue;
689 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
690
691 if ((reg_class_size[rclass] > best_size
692 && (best_cost < 0 || best_cost >= cost))
693 || best_cost > cost)
694 {
695 best_class = (enum reg_class) rclass;
696 best_size = reg_class_size[rclass];
697 best_cost = register_move_cost (outer, (enum reg_class) rclass,
698 dest_class);
699 }
700 }
701
702 gcc_assert (best_size != 0);
703
704 return best_class;
705 }
706
707 /* We are trying to reload a subreg of something that is not a register.
708 Find the largest class which contains only registers valid in
709 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
710 which we would eventually like to obtain the object. */
711
712 static enum reg_class
713 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
714 machine_mode mode ATTRIBUTE_UNUSED,
715 enum reg_class dest_class ATTRIBUTE_UNUSED)
716 {
717 int best_cost = -1;
718 int rclass;
719 int regno;
720 enum reg_class best_class = NO_REGS;
721 unsigned int best_size = 0;
722 int cost;
723
724 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
725 {
726 int bad = 0;
727 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
728 {
729 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
730 && !HARD_REGNO_MODE_OK (regno, mode))
731 bad = 1;
732 }
733
734 if (bad)
735 continue;
736
737 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
738
739 if ((reg_class_size[rclass] > best_size
740 && (best_cost < 0 || best_cost >= cost))
741 || best_cost > cost)
742 {
743 best_class = (enum reg_class) rclass;
744 best_size = reg_class_size[rclass];
745 best_cost = register_move_cost (outer, (enum reg_class) rclass,
746 dest_class);
747 }
748 }
749
750 gcc_assert (best_size != 0);
751
752 #ifdef LIMIT_RELOAD_CLASS
753 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
754 #endif
755 return best_class;
756 }
757 \f
758 /* Return the number of a previously made reload that can be combined with
759 a new one, or n_reloads if none of the existing reloads can be used.
760 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
761 push_reload, they determine the kind of the new reload that we try to
762 combine. P_IN points to the corresponding value of IN, which can be
763 modified by this function.
764 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
765
766 static int
767 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
768 enum reload_type type, int opnum, int dont_share)
769 {
770 rtx in = *p_in;
771 int i;
772 /* We can't merge two reloads if the output of either one is
773 earlyclobbered. */
774
775 if (earlyclobber_operand_p (out))
776 return n_reloads;
777
778 /* We can use an existing reload if the class is right
779 and at least one of IN and OUT is a match
780 and the other is at worst neutral.
781 (A zero compared against anything is neutral.)
782
783 For targets with small register classes, don't use existing reloads
784 unless they are for the same thing since that can cause us to need
785 more reload registers than we otherwise would. */
786
787 for (i = 0; i < n_reloads; i++)
788 if ((reg_class_subset_p (rclass, rld[i].rclass)
789 || reg_class_subset_p (rld[i].rclass, rclass))
790 /* If the existing reload has a register, it must fit our class. */
791 && (rld[i].reg_rtx == 0
792 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
793 true_regnum (rld[i].reg_rtx)))
794 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
795 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
796 || (out != 0 && MATCHES (rld[i].out, out)
797 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
798 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
799 && (small_register_class_p (rclass)
800 || targetm.small_register_classes_for_mode_p (VOIDmode))
801 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
802 return i;
803
804 /* Reloading a plain reg for input can match a reload to postincrement
805 that reg, since the postincrement's value is the right value.
806 Likewise, it can match a preincrement reload, since we regard
807 the preincrementation as happening before any ref in this insn
808 to that register. */
809 for (i = 0; i < n_reloads; i++)
810 if ((reg_class_subset_p (rclass, rld[i].rclass)
811 || reg_class_subset_p (rld[i].rclass, rclass))
812 /* If the existing reload has a register, it must fit our
813 class. */
814 && (rld[i].reg_rtx == 0
815 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
816 true_regnum (rld[i].reg_rtx)))
817 && out == 0 && rld[i].out == 0 && rld[i].in != 0
818 && ((REG_P (in)
819 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
820 && MATCHES (XEXP (rld[i].in, 0), in))
821 || (REG_P (rld[i].in)
822 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
823 && MATCHES (XEXP (in, 0), rld[i].in)))
824 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
825 && (small_register_class_p (rclass)
826 || targetm.small_register_classes_for_mode_p (VOIDmode))
827 && MERGABLE_RELOADS (type, rld[i].when_needed,
828 opnum, rld[i].opnum))
829 {
830 /* Make sure reload_in ultimately has the increment,
831 not the plain register. */
832 if (REG_P (in))
833 *p_in = rld[i].in;
834 return i;
835 }
836 return n_reloads;
837 }
838
839 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
840 expression. MODE is the mode that X will be used in. OUTPUT is true if
841 the function is invoked for the output part of an enclosing reload. */
842
843 static bool
844 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
845 {
846 rtx inner;
847
848 /* Only SUBREGs are problematical. */
849 if (GET_CODE (x) != SUBREG)
850 return false;
851
852 inner = SUBREG_REG (x);
853
854 /* If INNER is a constant or PLUS, then INNER will need reloading. */
855 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
856 return true;
857
858 /* If INNER is not a hard register, then INNER will not need reloading. */
859 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
860 return false;
861
862 /* If INNER is not ok for MODE, then INNER will need reloading. */
863 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
864 return true;
865
866 /* If this is for an output, and the outer part is a word or smaller,
867 INNER is larger than a word and the number of registers in INNER is
868 not the same as the number of words in INNER, then INNER will need
869 reloading (with an in-out reload). */
870 return (output
871 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
872 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
873 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
874 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
875 }
876
877 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
878 requiring an extra reload register. The caller has already found that
879 IN contains some reference to REGNO, so check that we can produce the
880 new value in a single step. E.g. if we have
881 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
882 instruction that adds one to a register, this should succeed.
883 However, if we have something like
884 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
885 needs to be loaded into a register first, we need a separate reload
886 register.
887 Such PLUS reloads are generated by find_reload_address_part.
888 The out-of-range PLUS expressions are usually introduced in the instruction
889 patterns by register elimination and substituting pseudos without a home
890 by their function-invariant equivalences. */
891 static int
892 can_reload_into (rtx in, int regno, machine_mode mode)
893 {
894 rtx dst;
895 rtx_insn *test_insn;
896 int r = 0;
897 struct recog_data_d save_recog_data;
898
899 /* For matching constraints, we often get notional input reloads where
900 we want to use the original register as the reload register. I.e.
901 technically this is a non-optional input-output reload, but IN is
902 already a valid register, and has been chosen as the reload register.
903 Speed this up, since it trivially works. */
904 if (REG_P (in))
905 return 1;
906
907 /* To test MEMs properly, we'd have to take into account all the reloads
908 that are already scheduled, which can become quite complicated.
909 And since we've already handled address reloads for this MEM, it
910 should always succeed anyway. */
911 if (MEM_P (in))
912 return 1;
913
914 /* If we can make a simple SET insn that does the job, everything should
915 be fine. */
916 dst = gen_rtx_REG (mode, regno);
917 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
918 save_recog_data = recog_data;
919 if (recog_memoized (test_insn) >= 0)
920 {
921 extract_insn (test_insn);
922 r = constrain_operands (1, get_enabled_alternatives (test_insn));
923 }
924 recog_data = save_recog_data;
925 return r;
926 }
927
928 /* Record one reload that needs to be performed.
929 IN is an rtx saying where the data are to be found before this instruction.
930 OUT says where they must be stored after the instruction.
931 (IN is zero for data not read, and OUT is zero for data not written.)
932 INLOC and OUTLOC point to the places in the instructions where
933 IN and OUT were found.
934 If IN and OUT are both nonzero, it means the same register must be used
935 to reload both IN and OUT.
936
937 RCLASS is a register class required for the reloaded data.
938 INMODE is the machine mode that the instruction requires
939 for the reg that replaces IN and OUTMODE is likewise for OUT.
940
941 If IN is zero, then OUT's location and mode should be passed as
942 INLOC and INMODE.
943
944 STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx.
945
946 OPTIONAL nonzero means this reload does not need to be performed:
947 it can be discarded if that is more convenient.
948
949 OPNUM and TYPE say what the purpose of this reload is.
950
951 The return value is the reload-number for this reload.
952
953 If both IN and OUT are nonzero, in some rare cases we might
954 want to make two separate reloads. (Actually we never do this now.)
955 Therefore, the reload-number for OUT is stored in
956 output_reloadnum when we return; the return value applies to IN.
957 Usually (presently always), when IN and OUT are nonzero,
958 the two reload-numbers are equal, but the caller should be careful to
959 distinguish them. */
960
961 int
962 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
963 enum reg_class rclass, machine_mode inmode,
964 machine_mode outmode, int strict_low, int optional,
965 int opnum, enum reload_type type)
966 {
967 int i;
968 int dont_share = 0;
969 int dont_remove_subreg = 0;
970 #ifdef LIMIT_RELOAD_CLASS
971 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
972 #endif
973 int secondary_in_reload = -1, secondary_out_reload = -1;
974 enum insn_code secondary_in_icode = CODE_FOR_nothing;
975 enum insn_code secondary_out_icode = CODE_FOR_nothing;
976 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
977 subreg_in_class = NO_REGS;
978
979 /* INMODE and/or OUTMODE could be VOIDmode if no mode
980 has been specified for the operand. In that case,
981 use the operand's mode as the mode to reload. */
982 if (inmode == VOIDmode && in != 0)
983 inmode = GET_MODE (in);
984 if (outmode == VOIDmode && out != 0)
985 outmode = GET_MODE (out);
986
987 /* If find_reloads and friends until now missed to replace a pseudo
988 with a constant of reg_equiv_constant something went wrong
989 beforehand.
990 Note that it can't simply be done here if we missed it earlier
991 since the constant might need to be pushed into the literal pool
992 and the resulting memref would probably need further
993 reloading. */
994 if (in != 0 && REG_P (in))
995 {
996 int regno = REGNO (in);
997
998 gcc_assert (regno < FIRST_PSEUDO_REGISTER
999 || reg_renumber[regno] >= 0
1000 || reg_equiv_constant (regno) == NULL_RTX);
1001 }
1002
1003 /* reg_equiv_constant only contains constants which are obviously
1004 not appropriate as destination. So if we would need to replace
1005 the destination pseudo with a constant we are in real
1006 trouble. */
1007 if (out != 0 && REG_P (out))
1008 {
1009 int regno = REGNO (out);
1010
1011 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1012 || reg_renumber[regno] >= 0
1013 || reg_equiv_constant (regno) == NULL_RTX);
1014 }
1015
1016 /* If we have a read-write operand with an address side-effect,
1017 change either IN or OUT so the side-effect happens only once. */
1018 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1019 switch (GET_CODE (XEXP (in, 0)))
1020 {
1021 case POST_INC: case POST_DEC: case POST_MODIFY:
1022 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1023 break;
1024
1025 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1026 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1027 break;
1028
1029 default:
1030 break;
1031 }
1032
1033 /* If we are reloading a (SUBREG constant ...), really reload just the
1034 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1035 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1036 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1037 register is a pseudo, also reload the inside expression.
1038 For machines that extend byte loads, do this for any SUBREG of a pseudo
1039 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1040 M2 is an integral mode that gets extended when loaded.
1041 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1042 where either M1 is not valid for R or M2 is wider than a word but we
1043 only need one register to store an M2-sized quantity in R.
1044 (However, if OUT is nonzero, we need to reload the reg *and*
1045 the subreg, so do nothing here, and let following statement handle it.)
1046
1047 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1048 we can't handle it here because CONST_INT does not indicate a mode.
1049
1050 Similarly, we must reload the inside expression if we have a
1051 STRICT_LOW_PART (presumably, in == out in this case).
1052
1053 Also reload the inner expression if it does not require a secondary
1054 reload but the SUBREG does.
1055
1056 Finally, reload the inner expression if it is a register that is in
1057 the class whose registers cannot be referenced in a different size
1058 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1059 cannot reload just the inside since we might end up with the wrong
1060 register class. But if it is inside a STRICT_LOW_PART, we have
1061 no choice, so we hope we do get the right register class there. */
1062
1063 if (in != 0 && GET_CODE (in) == SUBREG
1064 && (subreg_lowpart_p (in) || strict_low)
1065 #ifdef CANNOT_CHANGE_MODE_CLASS
1066 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1067 #endif
1068 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1069 && (CONSTANT_P (SUBREG_REG (in))
1070 || GET_CODE (SUBREG_REG (in)) == PLUS
1071 || strict_low
1072 || (((REG_P (SUBREG_REG (in))
1073 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1074 || MEM_P (SUBREG_REG (in)))
1075 && ((GET_MODE_PRECISION (inmode)
1076 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1077 #ifdef LOAD_EXTEND_OP
1078 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1079 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1080 <= UNITS_PER_WORD)
1081 && (GET_MODE_PRECISION (inmode)
1082 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1083 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1084 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1085 #endif
1086 #if WORD_REGISTER_OPERATIONS
1087 || ((GET_MODE_PRECISION (inmode)
1088 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1089 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1090 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1091 / UNITS_PER_WORD)))
1092 #endif
1093 ))
1094 || (REG_P (SUBREG_REG (in))
1095 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1096 /* The case where out is nonzero
1097 is handled differently in the following statement. */
1098 && (out == 0 || subreg_lowpart_p (in))
1099 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1100 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1101 > UNITS_PER_WORD)
1102 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1103 / UNITS_PER_WORD)
1104 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1105 [GET_MODE (SUBREG_REG (in))]))
1106 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1107 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1108 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1109 SUBREG_REG (in))
1110 == NO_REGS))
1111 #ifdef CANNOT_CHANGE_MODE_CLASS
1112 || (REG_P (SUBREG_REG (in))
1113 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1114 && REG_CANNOT_CHANGE_MODE_P
1115 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1116 #endif
1117 ))
1118 {
1119 #ifdef LIMIT_RELOAD_CLASS
1120 in_subreg_loc = inloc;
1121 #endif
1122 inloc = &SUBREG_REG (in);
1123 in = *inloc;
1124 #if ! defined (LOAD_EXTEND_OP)
1125 if (!WORD_REGISTER_OPERATIONS
1126 && MEM_P (in))
1127 /* This is supposed to happen only for paradoxical subregs made by
1128 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1129 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1130 #endif
1131 inmode = GET_MODE (in);
1132 }
1133
1134 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1135 where M1 is not valid for R if it was not handled by the code above.
1136
1137 Similar issue for (SUBREG constant ...) if it was not handled by the
1138 code above. This can happen if SUBREG_BYTE != 0.
1139
1140 However, we must reload the inner reg *as well as* the subreg in
1141 that case. */
1142
1143 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1144 {
1145 if (REG_P (SUBREG_REG (in)))
1146 subreg_in_class
1147 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1148 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1149 GET_MODE (SUBREG_REG (in)),
1150 SUBREG_BYTE (in),
1151 GET_MODE (in)),
1152 REGNO (SUBREG_REG (in)));
1153 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1154 subreg_in_class = find_valid_class_1 (inmode,
1155 GET_MODE (SUBREG_REG (in)),
1156 rclass);
1157
1158 /* This relies on the fact that emit_reload_insns outputs the
1159 instructions for input reloads of type RELOAD_OTHER in the same
1160 order as the reloads. Thus if the outer reload is also of type
1161 RELOAD_OTHER, we are guaranteed that this inner reload will be
1162 output before the outer reload. */
1163 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1164 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1165 dont_remove_subreg = 1;
1166 }
1167
1168 /* Similarly for paradoxical and problematical SUBREGs on the output.
1169 Note that there is no reason we need worry about the previous value
1170 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1171 entitled to clobber it all (except in the case of a word mode subreg
1172 or of a STRICT_LOW_PART, in that latter case the constraint should
1173 label it input-output.) */
1174 if (out != 0 && GET_CODE (out) == SUBREG
1175 && (subreg_lowpart_p (out) || strict_low)
1176 #ifdef CANNOT_CHANGE_MODE_CLASS
1177 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1178 #endif
1179 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1180 && (CONSTANT_P (SUBREG_REG (out))
1181 || strict_low
1182 || (((REG_P (SUBREG_REG (out))
1183 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1184 || MEM_P (SUBREG_REG (out)))
1185 && ((GET_MODE_PRECISION (outmode)
1186 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1187 #if WORD_REGISTER_OPERATIONS
1188 || ((GET_MODE_PRECISION (outmode)
1189 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1190 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1191 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1192 / UNITS_PER_WORD)))
1193 #endif
1194 ))
1195 || (REG_P (SUBREG_REG (out))
1196 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1197 /* The case of a word mode subreg
1198 is handled differently in the following statement. */
1199 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1200 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1201 > UNITS_PER_WORD))
1202 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1203 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1204 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1205 SUBREG_REG (out))
1206 == NO_REGS))
1207 #ifdef CANNOT_CHANGE_MODE_CLASS
1208 || (REG_P (SUBREG_REG (out))
1209 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1210 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1211 GET_MODE (SUBREG_REG (out)),
1212 outmode))
1213 #endif
1214 ))
1215 {
1216 #ifdef LIMIT_RELOAD_CLASS
1217 out_subreg_loc = outloc;
1218 #endif
1219 outloc = &SUBREG_REG (out);
1220 out = *outloc;
1221 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1222 || GET_MODE_SIZE (GET_MODE (out))
1223 <= GET_MODE_SIZE (outmode));
1224 outmode = GET_MODE (out);
1225 }
1226
1227 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1228 where either M1 is not valid for R or M2 is wider than a word but we
1229 only need one register to store an M2-sized quantity in R.
1230
1231 However, we must reload the inner reg *as well as* the subreg in
1232 that case and the inner reg is an in-out reload. */
1233
1234 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1235 {
1236 enum reg_class in_out_class
1237 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1238 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1239 GET_MODE (SUBREG_REG (out)),
1240 SUBREG_BYTE (out),
1241 GET_MODE (out)),
1242 REGNO (SUBREG_REG (out)));
1243
1244 /* This relies on the fact that emit_reload_insns outputs the
1245 instructions for output reloads of type RELOAD_OTHER in reverse
1246 order of the reloads. Thus if the outer reload is also of type
1247 RELOAD_OTHER, we are guaranteed that this inner reload will be
1248 output after the outer reload. */
1249 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1250 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1251 0, 0, opnum, RELOAD_OTHER);
1252 dont_remove_subreg = 1;
1253 }
1254
1255 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1256 if (in != 0 && out != 0 && MEM_P (out)
1257 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1258 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1259 dont_share = 1;
1260
1261 /* If IN is a SUBREG of a hard register, make a new REG. This
1262 simplifies some of the cases below. */
1263
1264 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1265 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1266 && ! dont_remove_subreg)
1267 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1268
1269 /* Similarly for OUT. */
1270 if (out != 0 && GET_CODE (out) == SUBREG
1271 && REG_P (SUBREG_REG (out))
1272 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1273 && ! dont_remove_subreg)
1274 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1275
1276 /* Narrow down the class of register wanted if that is
1277 desirable on this machine for efficiency. */
1278 {
1279 reg_class_t preferred_class = rclass;
1280
1281 if (in != 0)
1282 preferred_class = targetm.preferred_reload_class (in, rclass);
1283
1284 /* Output reloads may need analogous treatment, different in detail. */
1285 if (out != 0)
1286 preferred_class
1287 = targetm.preferred_output_reload_class (out, preferred_class);
1288
1289 /* Discard what the target said if we cannot do it. */
1290 if (preferred_class != NO_REGS
1291 || (optional && type == RELOAD_FOR_OUTPUT))
1292 rclass = (enum reg_class) preferred_class;
1293 }
1294
1295 /* Make sure we use a class that can handle the actual pseudo
1296 inside any subreg. For example, on the 386, QImode regs
1297 can appear within SImode subregs. Although GENERAL_REGS
1298 can handle SImode, QImode needs a smaller class. */
1299 #ifdef LIMIT_RELOAD_CLASS
1300 if (in_subreg_loc)
1301 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1302 else if (in != 0 && GET_CODE (in) == SUBREG)
1303 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1304
1305 if (out_subreg_loc)
1306 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1307 if (out != 0 && GET_CODE (out) == SUBREG)
1308 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1309 #endif
1310
1311 /* Verify that this class is at least possible for the mode that
1312 is specified. */
1313 if (this_insn_is_asm)
1314 {
1315 machine_mode mode;
1316 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1317 mode = inmode;
1318 else
1319 mode = outmode;
1320 if (mode == VOIDmode)
1321 {
1322 error_for_asm (this_insn, "cannot reload integer constant "
1323 "operand in %<asm%>");
1324 mode = word_mode;
1325 if (in != 0)
1326 inmode = word_mode;
1327 if (out != 0)
1328 outmode = word_mode;
1329 }
1330 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1331 if (HARD_REGNO_MODE_OK (i, mode)
1332 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1333 break;
1334 if (i == FIRST_PSEUDO_REGISTER)
1335 {
1336 error_for_asm (this_insn, "impossible register constraint "
1337 "in %<asm%>");
1338 /* Avoid further trouble with this insn. */
1339 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1340 /* We used to continue here setting class to ALL_REGS, but it triggers
1341 sanity check on i386 for:
1342 void foo(long double d)
1343 {
1344 asm("" :: "a" (d));
1345 }
1346 Returning zero here ought to be safe as we take care in
1347 find_reloads to not process the reloads when instruction was
1348 replaced by USE. */
1349
1350 return 0;
1351 }
1352 }
1353
1354 /* Optional output reloads are always OK even if we have no register class,
1355 since the function of these reloads is only to have spill_reg_store etc.
1356 set, so that the storing insn can be deleted later. */
1357 gcc_assert (rclass != NO_REGS
1358 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1359
1360 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1361
1362 if (i == n_reloads)
1363 {
1364 /* See if we need a secondary reload register to move between CLASS
1365 and IN or CLASS and OUT. Get the icode and push any required reloads
1366 needed for each of them if so. */
1367
1368 if (in != 0)
1369 secondary_in_reload
1370 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1371 &secondary_in_icode, NULL);
1372 if (out != 0 && GET_CODE (out) != SCRATCH)
1373 secondary_out_reload
1374 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1375 type, &secondary_out_icode, NULL);
1376
1377 /* We found no existing reload suitable for re-use.
1378 So add an additional reload. */
1379
1380 #ifdef SECONDARY_MEMORY_NEEDED
1381 if (subreg_in_class == NO_REGS
1382 && in != 0
1383 && (REG_P (in)
1384 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1385 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1386 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1387 /* If a memory location is needed for the copy, make one. */
1388 if (subreg_in_class != NO_REGS
1389 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1390 get_secondary_mem (in, inmode, opnum, type);
1391 #endif
1392
1393 i = n_reloads;
1394 rld[i].in = in;
1395 rld[i].out = out;
1396 rld[i].rclass = rclass;
1397 rld[i].inmode = inmode;
1398 rld[i].outmode = outmode;
1399 rld[i].reg_rtx = 0;
1400 rld[i].optional = optional;
1401 rld[i].inc = 0;
1402 rld[i].nocombine = 0;
1403 rld[i].in_reg = inloc ? *inloc : 0;
1404 rld[i].out_reg = outloc ? *outloc : 0;
1405 rld[i].opnum = opnum;
1406 rld[i].when_needed = type;
1407 rld[i].secondary_in_reload = secondary_in_reload;
1408 rld[i].secondary_out_reload = secondary_out_reload;
1409 rld[i].secondary_in_icode = secondary_in_icode;
1410 rld[i].secondary_out_icode = secondary_out_icode;
1411 rld[i].secondary_p = 0;
1412
1413 n_reloads++;
1414
1415 #ifdef SECONDARY_MEMORY_NEEDED
1416 if (out != 0
1417 && (REG_P (out)
1418 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1419 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1420 && SECONDARY_MEMORY_NEEDED (rclass,
1421 REGNO_REG_CLASS (reg_or_subregno (out)),
1422 outmode))
1423 get_secondary_mem (out, outmode, opnum, type);
1424 #endif
1425 }
1426 else
1427 {
1428 /* We are reusing an existing reload,
1429 but we may have additional information for it.
1430 For example, we may now have both IN and OUT
1431 while the old one may have just one of them. */
1432
1433 /* The modes can be different. If they are, we want to reload in
1434 the larger mode, so that the value is valid for both modes. */
1435 if (inmode != VOIDmode
1436 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1437 rld[i].inmode = inmode;
1438 if (outmode != VOIDmode
1439 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1440 rld[i].outmode = outmode;
1441 if (in != 0)
1442 {
1443 rtx in_reg = inloc ? *inloc : 0;
1444 /* If we merge reloads for two distinct rtl expressions that
1445 are identical in content, there might be duplicate address
1446 reloads. Remove the extra set now, so that if we later find
1447 that we can inherit this reload, we can get rid of the
1448 address reloads altogether.
1449
1450 Do not do this if both reloads are optional since the result
1451 would be an optional reload which could potentially leave
1452 unresolved address replacements.
1453
1454 It is not sufficient to call transfer_replacements since
1455 choose_reload_regs will remove the replacements for address
1456 reloads of inherited reloads which results in the same
1457 problem. */
1458 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1459 && ! (rld[i].optional && optional))
1460 {
1461 /* We must keep the address reload with the lower operand
1462 number alive. */
1463 if (opnum > rld[i].opnum)
1464 {
1465 remove_address_replacements (in);
1466 in = rld[i].in;
1467 in_reg = rld[i].in_reg;
1468 }
1469 else
1470 remove_address_replacements (rld[i].in);
1471 }
1472 /* When emitting reloads we don't necessarily look at the in-
1473 and outmode, but also directly at the operands (in and out).
1474 So we can't simply overwrite them with whatever we have found
1475 for this (to-be-merged) reload, we have to "merge" that too.
1476 Reusing another reload already verified that we deal with the
1477 same operands, just possibly in different modes. So we
1478 overwrite the operands only when the new mode is larger.
1479 See also PR33613. */
1480 if (!rld[i].in
1481 || GET_MODE_SIZE (GET_MODE (in))
1482 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1483 rld[i].in = in;
1484 if (!rld[i].in_reg
1485 || (in_reg
1486 && GET_MODE_SIZE (GET_MODE (in_reg))
1487 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1488 rld[i].in_reg = in_reg;
1489 }
1490 if (out != 0)
1491 {
1492 if (!rld[i].out
1493 || (out
1494 && GET_MODE_SIZE (GET_MODE (out))
1495 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1496 rld[i].out = out;
1497 if (outloc
1498 && (!rld[i].out_reg
1499 || GET_MODE_SIZE (GET_MODE (*outloc))
1500 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1501 rld[i].out_reg = *outloc;
1502 }
1503 if (reg_class_subset_p (rclass, rld[i].rclass))
1504 rld[i].rclass = rclass;
1505 rld[i].optional &= optional;
1506 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1507 opnum, rld[i].opnum))
1508 rld[i].when_needed = RELOAD_OTHER;
1509 rld[i].opnum = MIN (rld[i].opnum, opnum);
1510 }
1511
1512 /* If the ostensible rtx being reloaded differs from the rtx found
1513 in the location to substitute, this reload is not safe to combine
1514 because we cannot reliably tell whether it appears in the insn. */
1515
1516 if (in != 0 && in != *inloc)
1517 rld[i].nocombine = 1;
1518
1519 #if 0
1520 /* This was replaced by changes in find_reloads_address_1 and the new
1521 function inc_for_reload, which go with a new meaning of reload_inc. */
1522
1523 /* If this is an IN/OUT reload in an insn that sets the CC,
1524 it must be for an autoincrement. It doesn't work to store
1525 the incremented value after the insn because that would clobber the CC.
1526 So we must do the increment of the value reloaded from,
1527 increment it, store it back, then decrement again. */
1528 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1529 {
1530 out = 0;
1531 rld[i].out = 0;
1532 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1533 /* If we did not find a nonzero amount-to-increment-by,
1534 that contradicts the belief that IN is being incremented
1535 in an address in this insn. */
1536 gcc_assert (rld[i].inc != 0);
1537 }
1538 #endif
1539
1540 /* If we will replace IN and OUT with the reload-reg,
1541 record where they are located so that substitution need
1542 not do a tree walk. */
1543
1544 if (replace_reloads)
1545 {
1546 if (inloc != 0)
1547 {
1548 struct replacement *r = &replacements[n_replacements++];
1549 r->what = i;
1550 r->where = inloc;
1551 r->mode = inmode;
1552 }
1553 if (outloc != 0 && outloc != inloc)
1554 {
1555 struct replacement *r = &replacements[n_replacements++];
1556 r->what = i;
1557 r->where = outloc;
1558 r->mode = outmode;
1559 }
1560 }
1561
1562 /* If this reload is just being introduced and it has both
1563 an incoming quantity and an outgoing quantity that are
1564 supposed to be made to match, see if either one of the two
1565 can serve as the place to reload into.
1566
1567 If one of them is acceptable, set rld[i].reg_rtx
1568 to that one. */
1569
1570 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1571 {
1572 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1573 inmode, outmode,
1574 rld[i].rclass, i,
1575 earlyclobber_operand_p (out));
1576
1577 /* If the outgoing register already contains the same value
1578 as the incoming one, we can dispense with loading it.
1579 The easiest way to tell the caller that is to give a phony
1580 value for the incoming operand (same as outgoing one). */
1581 if (rld[i].reg_rtx == out
1582 && (REG_P (in) || CONSTANT_P (in))
1583 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1584 static_reload_reg_p, i, inmode))
1585 rld[i].in = out;
1586 }
1587
1588 /* If this is an input reload and the operand contains a register that
1589 dies in this insn and is used nowhere else, see if it is the right class
1590 to be used for this reload. Use it if so. (This occurs most commonly
1591 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1592 this if it is also an output reload that mentions the register unless
1593 the output is a SUBREG that clobbers an entire register.
1594
1595 Note that the operand might be one of the spill regs, if it is a
1596 pseudo reg and we are in a block where spilling has not taken place.
1597 But if there is no spilling in this block, that is OK.
1598 An explicitly used hard reg cannot be a spill reg. */
1599
1600 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1601 {
1602 rtx note;
1603 int regno;
1604 machine_mode rel_mode = inmode;
1605
1606 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1607 rel_mode = outmode;
1608
1609 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1610 if (REG_NOTE_KIND (note) == REG_DEAD
1611 && REG_P (XEXP (note, 0))
1612 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1613 && reg_mentioned_p (XEXP (note, 0), in)
1614 /* Check that a former pseudo is valid; see find_dummy_reload. */
1615 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1616 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1617 ORIGINAL_REGNO (XEXP (note, 0)))
1618 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1619 && ! refers_to_regno_for_reload_p (regno,
1620 end_hard_regno (rel_mode,
1621 regno),
1622 PATTERN (this_insn), inloc)
1623 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1624 /* If this is also an output reload, IN cannot be used as
1625 the reload register if it is set in this insn unless IN
1626 is also OUT. */
1627 && (out == 0 || in == out
1628 || ! hard_reg_set_here_p (regno,
1629 end_hard_regno (rel_mode, regno),
1630 PATTERN (this_insn)))
1631 /* ??? Why is this code so different from the previous?
1632 Is there any simple coherent way to describe the two together?
1633 What's going on here. */
1634 && (in != out
1635 || (GET_CODE (in) == SUBREG
1636 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1637 / UNITS_PER_WORD)
1638 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1639 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1640 /* Make sure the operand fits in the reg that dies. */
1641 && (GET_MODE_SIZE (rel_mode)
1642 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1643 && HARD_REGNO_MODE_OK (regno, inmode)
1644 && HARD_REGNO_MODE_OK (regno, outmode))
1645 {
1646 unsigned int offs;
1647 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1648 hard_regno_nregs[regno][outmode]);
1649
1650 for (offs = 0; offs < nregs; offs++)
1651 if (fixed_regs[regno + offs]
1652 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1653 regno + offs))
1654 break;
1655
1656 if (offs == nregs
1657 && (! (refers_to_regno_for_reload_p
1658 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1659 || can_reload_into (in, regno, inmode)))
1660 {
1661 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1662 break;
1663 }
1664 }
1665 }
1666
1667 if (out)
1668 output_reloadnum = i;
1669
1670 return i;
1671 }
1672
1673 /* Record an additional place we must replace a value
1674 for which we have already recorded a reload.
1675 RELOADNUM is the value returned by push_reload
1676 when the reload was recorded.
1677 This is used in insn patterns that use match_dup. */
1678
1679 static void
1680 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1681 {
1682 if (replace_reloads)
1683 {
1684 struct replacement *r = &replacements[n_replacements++];
1685 r->what = reloadnum;
1686 r->where = loc;
1687 r->mode = mode;
1688 }
1689 }
1690
1691 /* Duplicate any replacement we have recorded to apply at
1692 location ORIG_LOC to also be performed at DUP_LOC.
1693 This is used in insn patterns that use match_dup. */
1694
1695 static void
1696 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1697 {
1698 int i, n = n_replacements;
1699
1700 for (i = 0; i < n; i++)
1701 {
1702 struct replacement *r = &replacements[i];
1703 if (r->where == orig_loc)
1704 push_replacement (dup_loc, r->what, r->mode);
1705 }
1706 }
1707 \f
1708 /* Transfer all replacements that used to be in reload FROM to be in
1709 reload TO. */
1710
1711 void
1712 transfer_replacements (int to, int from)
1713 {
1714 int i;
1715
1716 for (i = 0; i < n_replacements; i++)
1717 if (replacements[i].what == from)
1718 replacements[i].what = to;
1719 }
1720 \f
1721 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1722 or a subpart of it. If we have any replacements registered for IN_RTX,
1723 cancel the reloads that were supposed to load them.
1724 Return nonzero if we canceled any reloads. */
1725 int
1726 remove_address_replacements (rtx in_rtx)
1727 {
1728 int i, j;
1729 char reload_flags[MAX_RELOADS];
1730 int something_changed = 0;
1731
1732 memset (reload_flags, 0, sizeof reload_flags);
1733 for (i = 0, j = 0; i < n_replacements; i++)
1734 {
1735 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1736 reload_flags[replacements[i].what] |= 1;
1737 else
1738 {
1739 replacements[j++] = replacements[i];
1740 reload_flags[replacements[i].what] |= 2;
1741 }
1742 }
1743 /* Note that the following store must be done before the recursive calls. */
1744 n_replacements = j;
1745
1746 for (i = n_reloads - 1; i >= 0; i--)
1747 {
1748 if (reload_flags[i] == 1)
1749 {
1750 deallocate_reload_reg (i);
1751 remove_address_replacements (rld[i].in);
1752 rld[i].in = 0;
1753 something_changed = 1;
1754 }
1755 }
1756 return something_changed;
1757 }
1758 \f
1759 /* If there is only one output reload, and it is not for an earlyclobber
1760 operand, try to combine it with a (logically unrelated) input reload
1761 to reduce the number of reload registers needed.
1762
1763 This is safe if the input reload does not appear in
1764 the value being output-reloaded, because this implies
1765 it is not needed any more once the original insn completes.
1766
1767 If that doesn't work, see we can use any of the registers that
1768 die in this insn as a reload register. We can if it is of the right
1769 class and does not appear in the value being output-reloaded. */
1770
1771 static void
1772 combine_reloads (void)
1773 {
1774 int i, regno;
1775 int output_reload = -1;
1776 int secondary_out = -1;
1777 rtx note;
1778
1779 /* Find the output reload; return unless there is exactly one
1780 and that one is mandatory. */
1781
1782 for (i = 0; i < n_reloads; i++)
1783 if (rld[i].out != 0)
1784 {
1785 if (output_reload >= 0)
1786 return;
1787 output_reload = i;
1788 }
1789
1790 if (output_reload < 0 || rld[output_reload].optional)
1791 return;
1792
1793 /* An input-output reload isn't combinable. */
1794
1795 if (rld[output_reload].in != 0)
1796 return;
1797
1798 /* If this reload is for an earlyclobber operand, we can't do anything. */
1799 if (earlyclobber_operand_p (rld[output_reload].out))
1800 return;
1801
1802 /* If there is a reload for part of the address of this operand, we would
1803 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1804 its life to the point where doing this combine would not lower the
1805 number of spill registers needed. */
1806 for (i = 0; i < n_reloads; i++)
1807 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1808 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1809 && rld[i].opnum == rld[output_reload].opnum)
1810 return;
1811
1812 /* Check each input reload; can we combine it? */
1813
1814 for (i = 0; i < n_reloads; i++)
1815 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1816 /* Life span of this reload must not extend past main insn. */
1817 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1818 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1819 && rld[i].when_needed != RELOAD_OTHER
1820 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1821 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1822 [(int) rld[output_reload].outmode])
1823 && rld[i].inc == 0
1824 && rld[i].reg_rtx == 0
1825 #ifdef SECONDARY_MEMORY_NEEDED
1826 /* Don't combine two reloads with different secondary
1827 memory locations. */
1828 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1829 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1830 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1831 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1832 #endif
1833 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1834 ? (rld[i].rclass == rld[output_reload].rclass)
1835 : (reg_class_subset_p (rld[i].rclass,
1836 rld[output_reload].rclass)
1837 || reg_class_subset_p (rld[output_reload].rclass,
1838 rld[i].rclass)))
1839 && (MATCHES (rld[i].in, rld[output_reload].out)
1840 /* Args reversed because the first arg seems to be
1841 the one that we imagine being modified
1842 while the second is the one that might be affected. */
1843 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1844 rld[i].in)
1845 /* However, if the input is a register that appears inside
1846 the output, then we also can't share.
1847 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1848 If the same reload reg is used for both reg 69 and the
1849 result to be stored in memory, then that result
1850 will clobber the address of the memory ref. */
1851 && ! (REG_P (rld[i].in)
1852 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1853 rld[output_reload].out))))
1854 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1855 rld[i].when_needed != RELOAD_FOR_INPUT)
1856 && (reg_class_size[(int) rld[i].rclass]
1857 || targetm.small_register_classes_for_mode_p (VOIDmode))
1858 /* We will allow making things slightly worse by combining an
1859 input and an output, but no worse than that. */
1860 && (rld[i].when_needed == RELOAD_FOR_INPUT
1861 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1862 {
1863 int j;
1864
1865 /* We have found a reload to combine with! */
1866 rld[i].out = rld[output_reload].out;
1867 rld[i].out_reg = rld[output_reload].out_reg;
1868 rld[i].outmode = rld[output_reload].outmode;
1869 /* Mark the old output reload as inoperative. */
1870 rld[output_reload].out = 0;
1871 /* The combined reload is needed for the entire insn. */
1872 rld[i].when_needed = RELOAD_OTHER;
1873 /* If the output reload had a secondary reload, copy it. */
1874 if (rld[output_reload].secondary_out_reload != -1)
1875 {
1876 rld[i].secondary_out_reload
1877 = rld[output_reload].secondary_out_reload;
1878 rld[i].secondary_out_icode
1879 = rld[output_reload].secondary_out_icode;
1880 }
1881
1882 #ifdef SECONDARY_MEMORY_NEEDED
1883 /* Copy any secondary MEM. */
1884 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1885 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1886 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1887 #endif
1888 /* If required, minimize the register class. */
1889 if (reg_class_subset_p (rld[output_reload].rclass,
1890 rld[i].rclass))
1891 rld[i].rclass = rld[output_reload].rclass;
1892
1893 /* Transfer all replacements from the old reload to the combined. */
1894 for (j = 0; j < n_replacements; j++)
1895 if (replacements[j].what == output_reload)
1896 replacements[j].what = i;
1897
1898 return;
1899 }
1900
1901 /* If this insn has only one operand that is modified or written (assumed
1902 to be the first), it must be the one corresponding to this reload. It
1903 is safe to use anything that dies in this insn for that output provided
1904 that it does not occur in the output (we already know it isn't an
1905 earlyclobber. If this is an asm insn, give up. */
1906
1907 if (INSN_CODE (this_insn) == -1)
1908 return;
1909
1910 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1911 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1912 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1913 return;
1914
1915 /* See if some hard register that dies in this insn and is not used in
1916 the output is the right class. Only works if the register we pick
1917 up can fully hold our output reload. */
1918 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1919 if (REG_NOTE_KIND (note) == REG_DEAD
1920 && REG_P (XEXP (note, 0))
1921 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1922 rld[output_reload].out)
1923 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1924 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1925 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1926 regno)
1927 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1928 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1929 /* Ensure that a secondary or tertiary reload for this output
1930 won't want this register. */
1931 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1932 || (!(TEST_HARD_REG_BIT
1933 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1934 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1935 || !(TEST_HARD_REG_BIT
1936 (reg_class_contents[(int) rld[secondary_out].rclass],
1937 regno)))))
1938 && !fixed_regs[regno]
1939 /* Check that a former pseudo is valid; see find_dummy_reload. */
1940 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1941 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1942 ORIGINAL_REGNO (XEXP (note, 0)))
1943 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1944 {
1945 rld[output_reload].reg_rtx
1946 = gen_rtx_REG (rld[output_reload].outmode, regno);
1947 return;
1948 }
1949 }
1950 \f
1951 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1952 See if one of IN and OUT is a register that may be used;
1953 this is desirable since a spill-register won't be needed.
1954 If so, return the register rtx that proves acceptable.
1955
1956 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1957 RCLASS is the register class required for the reload.
1958
1959 If FOR_REAL is >= 0, it is the number of the reload,
1960 and in some cases when it can be discovered that OUT doesn't need
1961 to be computed, clear out rld[FOR_REAL].out.
1962
1963 If FOR_REAL is -1, this should not be done, because this call
1964 is just to see if a register can be found, not to find and install it.
1965
1966 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1967 puts an additional constraint on being able to use IN for OUT since
1968 IN must not appear elsewhere in the insn (it is assumed that IN itself
1969 is safe from the earlyclobber). */
1970
1971 static rtx
1972 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1973 machine_mode inmode, machine_mode outmode,
1974 reg_class_t rclass, int for_real, int earlyclobber)
1975 {
1976 rtx in = real_in;
1977 rtx out = real_out;
1978 int in_offset = 0;
1979 int out_offset = 0;
1980 rtx value = 0;
1981
1982 /* If operands exceed a word, we can't use either of them
1983 unless they have the same size. */
1984 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1985 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1986 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1987 return 0;
1988
1989 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1990 respectively refers to a hard register. */
1991
1992 /* Find the inside of any subregs. */
1993 while (GET_CODE (out) == SUBREG)
1994 {
1995 if (REG_P (SUBREG_REG (out))
1996 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1997 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1998 GET_MODE (SUBREG_REG (out)),
1999 SUBREG_BYTE (out),
2000 GET_MODE (out));
2001 out = SUBREG_REG (out);
2002 }
2003 while (GET_CODE (in) == SUBREG)
2004 {
2005 if (REG_P (SUBREG_REG (in))
2006 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
2007 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
2008 GET_MODE (SUBREG_REG (in)),
2009 SUBREG_BYTE (in),
2010 GET_MODE (in));
2011 in = SUBREG_REG (in);
2012 }
2013
2014 /* Narrow down the reg class, the same way push_reload will;
2015 otherwise we might find a dummy now, but push_reload won't. */
2016 {
2017 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2018 if (preferred_class != NO_REGS)
2019 rclass = (enum reg_class) preferred_class;
2020 }
2021
2022 /* See if OUT will do. */
2023 if (REG_P (out)
2024 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2025 {
2026 unsigned int regno = REGNO (out) + out_offset;
2027 unsigned int nwords = hard_regno_nregs[regno][outmode];
2028 rtx saved_rtx;
2029
2030 /* When we consider whether the insn uses OUT,
2031 ignore references within IN. They don't prevent us
2032 from copying IN into OUT, because those refs would
2033 move into the insn that reloads IN.
2034
2035 However, we only ignore IN in its role as this reload.
2036 If the insn uses IN elsewhere and it contains OUT,
2037 that counts. We can't be sure it's the "same" operand
2038 so it might not go through this reload.
2039
2040 We also need to avoid using OUT if it, or part of it, is a
2041 fixed register. Modifying such registers, even transiently,
2042 may have undefined effects on the machine, such as modifying
2043 the stack pointer. */
2044 saved_rtx = *inloc;
2045 *inloc = const0_rtx;
2046
2047 if (regno < FIRST_PSEUDO_REGISTER
2048 && HARD_REGNO_MODE_OK (regno, outmode)
2049 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2050 PATTERN (this_insn), outloc))
2051 {
2052 unsigned int i;
2053
2054 for (i = 0; i < nwords; i++)
2055 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2056 regno + i)
2057 || fixed_regs[regno + i])
2058 break;
2059
2060 if (i == nwords)
2061 {
2062 if (REG_P (real_out))
2063 value = real_out;
2064 else
2065 value = gen_rtx_REG (outmode, regno);
2066 }
2067 }
2068
2069 *inloc = saved_rtx;
2070 }
2071
2072 /* Consider using IN if OUT was not acceptable
2073 or if OUT dies in this insn (like the quotient in a divmod insn).
2074 We can't use IN unless it is dies in this insn,
2075 which means we must know accurately which hard regs are live.
2076 Also, the result can't go in IN if IN is used within OUT,
2077 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2078 if (hard_regs_live_known
2079 && REG_P (in)
2080 && REGNO (in) < FIRST_PSEUDO_REGISTER
2081 && (value == 0
2082 || find_reg_note (this_insn, REG_UNUSED, real_out))
2083 && find_reg_note (this_insn, REG_DEAD, real_in)
2084 && !fixed_regs[REGNO (in)]
2085 && HARD_REGNO_MODE_OK (REGNO (in),
2086 /* The only case where out and real_out might
2087 have different modes is where real_out
2088 is a subreg, and in that case, out
2089 has a real mode. */
2090 (GET_MODE (out) != VOIDmode
2091 ? GET_MODE (out) : outmode))
2092 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2093 /* However only do this if we can be sure that this input
2094 operand doesn't correspond with an uninitialized pseudo.
2095 global can assign some hardreg to it that is the same as
2096 the one assigned to a different, also live pseudo (as it
2097 can ignore the conflict). We must never introduce writes
2098 to such hardregs, as they would clobber the other live
2099 pseudo. See PR 20973. */
2100 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2101 ORIGINAL_REGNO (in))
2102 /* Similarly, only do this if we can be sure that the death
2103 note is still valid. global can assign some hardreg to
2104 the pseudo referenced in the note and simultaneously a
2105 subword of this hardreg to a different, also live pseudo,
2106 because only another subword of the hardreg is actually
2107 used in the insn. This cannot happen if the pseudo has
2108 been assigned exactly one hardreg. See PR 33732. */
2109 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2110 {
2111 unsigned int regno = REGNO (in) + in_offset;
2112 unsigned int nwords = hard_regno_nregs[regno][inmode];
2113
2114 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2115 && ! hard_reg_set_here_p (regno, regno + nwords,
2116 PATTERN (this_insn))
2117 && (! earlyclobber
2118 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2119 PATTERN (this_insn), inloc)))
2120 {
2121 unsigned int i;
2122
2123 for (i = 0; i < nwords; i++)
2124 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2125 regno + i))
2126 break;
2127
2128 if (i == nwords)
2129 {
2130 /* If we were going to use OUT as the reload reg
2131 and changed our mind, it means OUT is a dummy that
2132 dies here. So don't bother copying value to it. */
2133 if (for_real >= 0 && value == real_out)
2134 rld[for_real].out = 0;
2135 if (REG_P (real_in))
2136 value = real_in;
2137 else
2138 value = gen_rtx_REG (inmode, regno);
2139 }
2140 }
2141 }
2142
2143 return value;
2144 }
2145 \f
2146 /* This page contains subroutines used mainly for determining
2147 whether the IN or an OUT of a reload can serve as the
2148 reload register. */
2149
2150 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2151
2152 int
2153 earlyclobber_operand_p (rtx x)
2154 {
2155 int i;
2156
2157 for (i = 0; i < n_earlyclobbers; i++)
2158 if (reload_earlyclobbers[i] == x)
2159 return 1;
2160
2161 return 0;
2162 }
2163
2164 /* Return 1 if expression X alters a hard reg in the range
2165 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2166 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2167 X should be the body of an instruction. */
2168
2169 static int
2170 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2171 {
2172 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2173 {
2174 rtx op0 = SET_DEST (x);
2175
2176 while (GET_CODE (op0) == SUBREG)
2177 op0 = SUBREG_REG (op0);
2178 if (REG_P (op0))
2179 {
2180 unsigned int r = REGNO (op0);
2181
2182 /* See if this reg overlaps range under consideration. */
2183 if (r < end_regno
2184 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2185 return 1;
2186 }
2187 }
2188 else if (GET_CODE (x) == PARALLEL)
2189 {
2190 int i = XVECLEN (x, 0) - 1;
2191
2192 for (; i >= 0; i--)
2193 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2194 return 1;
2195 }
2196
2197 return 0;
2198 }
2199
2200 /* Return 1 if ADDR is a valid memory address for mode MODE
2201 in address space AS, and check that each pseudo reg has the
2202 proper kind of hard reg. */
2203
2204 int
2205 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2206 rtx addr, addr_space_t as)
2207 {
2208 #ifdef GO_IF_LEGITIMATE_ADDRESS
2209 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2210 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2211 return 0;
2212
2213 win:
2214 return 1;
2215 #else
2216 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2217 #endif
2218 }
2219 \f
2220 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2221 if they are the same hard reg, and has special hacks for
2222 autoincrement and autodecrement.
2223 This is specifically intended for find_reloads to use
2224 in determining whether two operands match.
2225 X is the operand whose number is the lower of the two.
2226
2227 The value is 2 if Y contains a pre-increment that matches
2228 a non-incrementing address in X. */
2229
2230 /* ??? To be completely correct, we should arrange to pass
2231 for X the output operand and for Y the input operand.
2232 For now, we assume that the output operand has the lower number
2233 because that is natural in (SET output (... input ...)). */
2234
2235 int
2236 operands_match_p (rtx x, rtx y)
2237 {
2238 int i;
2239 RTX_CODE code = GET_CODE (x);
2240 const char *fmt;
2241 int success_2;
2242
2243 if (x == y)
2244 return 1;
2245 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2246 && (REG_P (y) || (GET_CODE (y) == SUBREG
2247 && REG_P (SUBREG_REG (y)))))
2248 {
2249 int j;
2250
2251 if (code == SUBREG)
2252 {
2253 i = REGNO (SUBREG_REG (x));
2254 if (i >= FIRST_PSEUDO_REGISTER)
2255 goto slow;
2256 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2257 GET_MODE (SUBREG_REG (x)),
2258 SUBREG_BYTE (x),
2259 GET_MODE (x));
2260 }
2261 else
2262 i = REGNO (x);
2263
2264 if (GET_CODE (y) == SUBREG)
2265 {
2266 j = REGNO (SUBREG_REG (y));
2267 if (j >= FIRST_PSEUDO_REGISTER)
2268 goto slow;
2269 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2270 GET_MODE (SUBREG_REG (y)),
2271 SUBREG_BYTE (y),
2272 GET_MODE (y));
2273 }
2274 else
2275 j = REGNO (y);
2276
2277 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2278 multiple hard register group of scalar integer registers, so that
2279 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2280 register. */
2281 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2282 && SCALAR_INT_MODE_P (GET_MODE (x))
2283 && i < FIRST_PSEUDO_REGISTER)
2284 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2285 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2286 && SCALAR_INT_MODE_P (GET_MODE (y))
2287 && j < FIRST_PSEUDO_REGISTER)
2288 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2289
2290 return i == j;
2291 }
2292 /* If two operands must match, because they are really a single
2293 operand of an assembler insn, then two postincrements are invalid
2294 because the assembler insn would increment only once.
2295 On the other hand, a postincrement matches ordinary indexing
2296 if the postincrement is the output operand. */
2297 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2298 return operands_match_p (XEXP (x, 0), y);
2299 /* Two preincrements are invalid
2300 because the assembler insn would increment only once.
2301 On the other hand, a preincrement matches ordinary indexing
2302 if the preincrement is the input operand.
2303 In this case, return 2, since some callers need to do special
2304 things when this happens. */
2305 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2306 || GET_CODE (y) == PRE_MODIFY)
2307 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2308
2309 slow:
2310
2311 /* Now we have disposed of all the cases in which different rtx codes
2312 can match. */
2313 if (code != GET_CODE (y))
2314 return 0;
2315
2316 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2317 if (GET_MODE (x) != GET_MODE (y))
2318 return 0;
2319
2320 /* MEMs referring to different address space are not equivalent. */
2321 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2322 return 0;
2323
2324 switch (code)
2325 {
2326 CASE_CONST_UNIQUE:
2327 return 0;
2328
2329 case LABEL_REF:
2330 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2331 case SYMBOL_REF:
2332 return XSTR (x, 0) == XSTR (y, 0);
2333
2334 default:
2335 break;
2336 }
2337
2338 /* Compare the elements. If any pair of corresponding elements
2339 fail to match, return 0 for the whole things. */
2340
2341 success_2 = 0;
2342 fmt = GET_RTX_FORMAT (code);
2343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2344 {
2345 int val, j;
2346 switch (fmt[i])
2347 {
2348 case 'w':
2349 if (XWINT (x, i) != XWINT (y, i))
2350 return 0;
2351 break;
2352
2353 case 'i':
2354 if (XINT (x, i) != XINT (y, i))
2355 return 0;
2356 break;
2357
2358 case 'e':
2359 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2360 if (val == 0)
2361 return 0;
2362 /* If any subexpression returns 2,
2363 we should return 2 if we are successful. */
2364 if (val == 2)
2365 success_2 = 1;
2366 break;
2367
2368 case '0':
2369 break;
2370
2371 case 'E':
2372 if (XVECLEN (x, i) != XVECLEN (y, i))
2373 return 0;
2374 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2375 {
2376 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2377 if (val == 0)
2378 return 0;
2379 if (val == 2)
2380 success_2 = 1;
2381 }
2382 break;
2383
2384 /* It is believed that rtx's at this level will never
2385 contain anything but integers and other rtx's,
2386 except for within LABEL_REFs and SYMBOL_REFs. */
2387 default:
2388 gcc_unreachable ();
2389 }
2390 }
2391 return 1 + success_2;
2392 }
2393 \f
2394 /* Describe the range of registers or memory referenced by X.
2395 If X is a register, set REG_FLAG and put the first register
2396 number into START and the last plus one into END.
2397 If X is a memory reference, put a base address into BASE
2398 and a range of integer offsets into START and END.
2399 If X is pushing on the stack, we can assume it causes no trouble,
2400 so we set the SAFE field. */
2401
2402 static struct decomposition
2403 decompose (rtx x)
2404 {
2405 struct decomposition val;
2406 int all_const = 0;
2407
2408 memset (&val, 0, sizeof (val));
2409
2410 switch (GET_CODE (x))
2411 {
2412 case MEM:
2413 {
2414 rtx base = NULL_RTX, offset = 0;
2415 rtx addr = XEXP (x, 0);
2416
2417 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2418 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2419 {
2420 val.base = XEXP (addr, 0);
2421 val.start = -GET_MODE_SIZE (GET_MODE (x));
2422 val.end = GET_MODE_SIZE (GET_MODE (x));
2423 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2424 return val;
2425 }
2426
2427 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2428 {
2429 if (GET_CODE (XEXP (addr, 1)) == PLUS
2430 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2431 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2432 {
2433 val.base = XEXP (addr, 0);
2434 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2435 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2436 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2437 return val;
2438 }
2439 }
2440
2441 if (GET_CODE (addr) == CONST)
2442 {
2443 addr = XEXP (addr, 0);
2444 all_const = 1;
2445 }
2446 if (GET_CODE (addr) == PLUS)
2447 {
2448 if (CONSTANT_P (XEXP (addr, 0)))
2449 {
2450 base = XEXP (addr, 1);
2451 offset = XEXP (addr, 0);
2452 }
2453 else if (CONSTANT_P (XEXP (addr, 1)))
2454 {
2455 base = XEXP (addr, 0);
2456 offset = XEXP (addr, 1);
2457 }
2458 }
2459
2460 if (offset == 0)
2461 {
2462 base = addr;
2463 offset = const0_rtx;
2464 }
2465 if (GET_CODE (offset) == CONST)
2466 offset = XEXP (offset, 0);
2467 if (GET_CODE (offset) == PLUS)
2468 {
2469 if (CONST_INT_P (XEXP (offset, 0)))
2470 {
2471 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2472 offset = XEXP (offset, 0);
2473 }
2474 else if (CONST_INT_P (XEXP (offset, 1)))
2475 {
2476 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2477 offset = XEXP (offset, 1);
2478 }
2479 else
2480 {
2481 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2482 offset = const0_rtx;
2483 }
2484 }
2485 else if (!CONST_INT_P (offset))
2486 {
2487 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2488 offset = const0_rtx;
2489 }
2490
2491 if (all_const && GET_CODE (base) == PLUS)
2492 base = gen_rtx_CONST (GET_MODE (base), base);
2493
2494 gcc_assert (CONST_INT_P (offset));
2495
2496 val.start = INTVAL (offset);
2497 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2498 val.base = base;
2499 }
2500 break;
2501
2502 case REG:
2503 val.reg_flag = 1;
2504 val.start = true_regnum (x);
2505 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2506 {
2507 /* A pseudo with no hard reg. */
2508 val.start = REGNO (x);
2509 val.end = val.start + 1;
2510 }
2511 else
2512 /* A hard reg. */
2513 val.end = end_hard_regno (GET_MODE (x), val.start);
2514 break;
2515
2516 case SUBREG:
2517 if (!REG_P (SUBREG_REG (x)))
2518 /* This could be more precise, but it's good enough. */
2519 return decompose (SUBREG_REG (x));
2520 val.reg_flag = 1;
2521 val.start = true_regnum (x);
2522 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2523 return decompose (SUBREG_REG (x));
2524 else
2525 /* A hard reg. */
2526 val.end = val.start + subreg_nregs (x);
2527 break;
2528
2529 case SCRATCH:
2530 /* This hasn't been assigned yet, so it can't conflict yet. */
2531 val.safe = 1;
2532 break;
2533
2534 default:
2535 gcc_assert (CONSTANT_P (x));
2536 val.safe = 1;
2537 break;
2538 }
2539 return val;
2540 }
2541
2542 /* Return 1 if altering Y will not modify the value of X.
2543 Y is also described by YDATA, which should be decompose (Y). */
2544
2545 static int
2546 immune_p (rtx x, rtx y, struct decomposition ydata)
2547 {
2548 struct decomposition xdata;
2549
2550 if (ydata.reg_flag)
2551 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2552 if (ydata.safe)
2553 return 1;
2554
2555 gcc_assert (MEM_P (y));
2556 /* If Y is memory and X is not, Y can't affect X. */
2557 if (!MEM_P (x))
2558 return 1;
2559
2560 xdata = decompose (x);
2561
2562 if (! rtx_equal_p (xdata.base, ydata.base))
2563 {
2564 /* If bases are distinct symbolic constants, there is no overlap. */
2565 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2566 return 1;
2567 /* Constants and stack slots never overlap. */
2568 if (CONSTANT_P (xdata.base)
2569 && (ydata.base == frame_pointer_rtx
2570 || ydata.base == hard_frame_pointer_rtx
2571 || ydata.base == stack_pointer_rtx))
2572 return 1;
2573 if (CONSTANT_P (ydata.base)
2574 && (xdata.base == frame_pointer_rtx
2575 || xdata.base == hard_frame_pointer_rtx
2576 || xdata.base == stack_pointer_rtx))
2577 return 1;
2578 /* If either base is variable, we don't know anything. */
2579 return 0;
2580 }
2581
2582 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2583 }
2584
2585 /* Similar, but calls decompose. */
2586
2587 int
2588 safe_from_earlyclobber (rtx op, rtx clobber)
2589 {
2590 struct decomposition early_data;
2591
2592 early_data = decompose (clobber);
2593 return immune_p (op, clobber, early_data);
2594 }
2595 \f
2596 /* Main entry point of this file: search the body of INSN
2597 for values that need reloading and record them with push_reload.
2598 REPLACE nonzero means record also where the values occur
2599 so that subst_reloads can be used.
2600
2601 IND_LEVELS says how many levels of indirection are supported by this
2602 machine; a value of zero means that a memory reference is not a valid
2603 memory address.
2604
2605 LIVE_KNOWN says we have valid information about which hard
2606 regs are live at each point in the program; this is true when
2607 we are called from global_alloc but false when stupid register
2608 allocation has been done.
2609
2610 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2611 which is nonnegative if the reg has been commandeered for reloading into.
2612 It is copied into STATIC_RELOAD_REG_P and referenced from there
2613 by various subroutines.
2614
2615 Return TRUE if some operands need to be changed, because of swapping
2616 commutative operands, reg_equiv_address substitution, or whatever. */
2617
2618 int
2619 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2620 short *reload_reg_p)
2621 {
2622 int insn_code_number;
2623 int i, j;
2624 int noperands;
2625 /* These start out as the constraints for the insn
2626 and they are chewed up as we consider alternatives. */
2627 const char *constraints[MAX_RECOG_OPERANDS];
2628 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2629 a register. */
2630 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2631 char pref_or_nothing[MAX_RECOG_OPERANDS];
2632 /* Nonzero for a MEM operand whose entire address needs a reload.
2633 May be -1 to indicate the entire address may or may not need a reload. */
2634 int address_reloaded[MAX_RECOG_OPERANDS];
2635 /* Nonzero for an address operand that needs to be completely reloaded.
2636 May be -1 to indicate the entire operand may or may not need a reload. */
2637 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2638 /* Value of enum reload_type to use for operand. */
2639 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2640 /* Value of enum reload_type to use within address of operand. */
2641 enum reload_type address_type[MAX_RECOG_OPERANDS];
2642 /* Save the usage of each operand. */
2643 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2644 int no_input_reloads = 0, no_output_reloads = 0;
2645 int n_alternatives;
2646 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2647 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2648 char this_alternative_win[MAX_RECOG_OPERANDS];
2649 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2650 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2651 int this_alternative_matches[MAX_RECOG_OPERANDS];
2652 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2653 int this_alternative_number;
2654 int goal_alternative_number = 0;
2655 int operand_reloadnum[MAX_RECOG_OPERANDS];
2656 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2657 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2658 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2659 char goal_alternative_win[MAX_RECOG_OPERANDS];
2660 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2661 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2662 int goal_alternative_swapped;
2663 int best;
2664 int commutative;
2665 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2666 rtx substed_operand[MAX_RECOG_OPERANDS];
2667 rtx body = PATTERN (insn);
2668 rtx set = single_set (insn);
2669 int goal_earlyclobber = 0, this_earlyclobber;
2670 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2671 int retval = 0;
2672
2673 this_insn = insn;
2674 n_reloads = 0;
2675 n_replacements = 0;
2676 n_earlyclobbers = 0;
2677 replace_reloads = replace;
2678 hard_regs_live_known = live_known;
2679 static_reload_reg_p = reload_reg_p;
2680
2681 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2682 neither are insns that SET cc0. Insns that use CC0 are not allowed
2683 to have any input reloads. */
2684 if (JUMP_P (insn) || CALL_P (insn))
2685 no_output_reloads = 1;
2686
2687 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2688 no_input_reloads = 1;
2689 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2690 no_output_reloads = 1;
2691
2692 #ifdef SECONDARY_MEMORY_NEEDED
2693 /* The eliminated forms of any secondary memory locations are per-insn, so
2694 clear them out here. */
2695
2696 if (secondary_memlocs_elim_used)
2697 {
2698 memset (secondary_memlocs_elim, 0,
2699 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2700 secondary_memlocs_elim_used = 0;
2701 }
2702 #endif
2703
2704 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2705 is cheap to move between them. If it is not, there may not be an insn
2706 to do the copy, so we may need a reload. */
2707 if (GET_CODE (body) == SET
2708 && REG_P (SET_DEST (body))
2709 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2710 && REG_P (SET_SRC (body))
2711 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2712 && register_move_cost (GET_MODE (SET_SRC (body)),
2713 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2714 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2715 return 0;
2716
2717 extract_insn (insn);
2718
2719 noperands = reload_n_operands = recog_data.n_operands;
2720 n_alternatives = recog_data.n_alternatives;
2721
2722 /* Just return "no reloads" if insn has no operands with constraints. */
2723 if (noperands == 0 || n_alternatives == 0)
2724 return 0;
2725
2726 insn_code_number = INSN_CODE (insn);
2727 this_insn_is_asm = insn_code_number < 0;
2728
2729 memcpy (operand_mode, recog_data.operand_mode,
2730 noperands * sizeof (machine_mode));
2731 memcpy (constraints, recog_data.constraints,
2732 noperands * sizeof (const char *));
2733
2734 commutative = -1;
2735
2736 /* If we will need to know, later, whether some pair of operands
2737 are the same, we must compare them now and save the result.
2738 Reloading the base and index registers will clobber them
2739 and afterward they will fail to match. */
2740
2741 for (i = 0; i < noperands; i++)
2742 {
2743 const char *p;
2744 int c;
2745 char *end;
2746
2747 substed_operand[i] = recog_data.operand[i];
2748 p = constraints[i];
2749
2750 modified[i] = RELOAD_READ;
2751
2752 /* Scan this operand's constraint to see if it is an output operand,
2753 an in-out operand, is commutative, or should match another. */
2754
2755 while ((c = *p))
2756 {
2757 p += CONSTRAINT_LEN (c, p);
2758 switch (c)
2759 {
2760 case '=':
2761 modified[i] = RELOAD_WRITE;
2762 break;
2763 case '+':
2764 modified[i] = RELOAD_READ_WRITE;
2765 break;
2766 case '%':
2767 {
2768 /* The last operand should not be marked commutative. */
2769 gcc_assert (i != noperands - 1);
2770
2771 /* We currently only support one commutative pair of
2772 operands. Some existing asm code currently uses more
2773 than one pair. Previously, that would usually work,
2774 but sometimes it would crash the compiler. We
2775 continue supporting that case as well as we can by
2776 silently ignoring all but the first pair. In the
2777 future we may handle it correctly. */
2778 if (commutative < 0)
2779 commutative = i;
2780 else
2781 gcc_assert (this_insn_is_asm);
2782 }
2783 break;
2784 /* Use of ISDIGIT is tempting here, but it may get expensive because
2785 of locale support we don't want. */
2786 case '0': case '1': case '2': case '3': case '4':
2787 case '5': case '6': case '7': case '8': case '9':
2788 {
2789 c = strtoul (p - 1, &end, 10);
2790 p = end;
2791
2792 operands_match[c][i]
2793 = operands_match_p (recog_data.operand[c],
2794 recog_data.operand[i]);
2795
2796 /* An operand may not match itself. */
2797 gcc_assert (c != i);
2798
2799 /* If C can be commuted with C+1, and C might need to match I,
2800 then C+1 might also need to match I. */
2801 if (commutative >= 0)
2802 {
2803 if (c == commutative || c == commutative + 1)
2804 {
2805 int other = c + (c == commutative ? 1 : -1);
2806 operands_match[other][i]
2807 = operands_match_p (recog_data.operand[other],
2808 recog_data.operand[i]);
2809 }
2810 if (i == commutative || i == commutative + 1)
2811 {
2812 int other = i + (i == commutative ? 1 : -1);
2813 operands_match[c][other]
2814 = operands_match_p (recog_data.operand[c],
2815 recog_data.operand[other]);
2816 }
2817 /* Note that C is supposed to be less than I.
2818 No need to consider altering both C and I because in
2819 that case we would alter one into the other. */
2820 }
2821 }
2822 }
2823 }
2824 }
2825
2826 /* Examine each operand that is a memory reference or memory address
2827 and reload parts of the addresses into index registers.
2828 Also here any references to pseudo regs that didn't get hard regs
2829 but are equivalent to constants get replaced in the insn itself
2830 with those constants. Nobody will ever see them again.
2831
2832 Finally, set up the preferred classes of each operand. */
2833
2834 for (i = 0; i < noperands; i++)
2835 {
2836 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2837
2838 address_reloaded[i] = 0;
2839 address_operand_reloaded[i] = 0;
2840 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2841 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2842 : RELOAD_OTHER);
2843 address_type[i]
2844 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2845 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2846 : RELOAD_OTHER);
2847
2848 if (*constraints[i] == 0)
2849 /* Ignore things like match_operator operands. */
2850 ;
2851 else if (insn_extra_address_constraint
2852 (lookup_constraint (constraints[i])))
2853 {
2854 address_operand_reloaded[i]
2855 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2856 recog_data.operand[i],
2857 recog_data.operand_loc[i],
2858 i, operand_type[i], ind_levels, insn);
2859
2860 /* If we now have a simple operand where we used to have a
2861 PLUS or MULT, re-recognize and try again. */
2862 if ((OBJECT_P (*recog_data.operand_loc[i])
2863 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2864 && (GET_CODE (recog_data.operand[i]) == MULT
2865 || GET_CODE (recog_data.operand[i]) == PLUS))
2866 {
2867 INSN_CODE (insn) = -1;
2868 retval = find_reloads (insn, replace, ind_levels, live_known,
2869 reload_reg_p);
2870 return retval;
2871 }
2872
2873 recog_data.operand[i] = *recog_data.operand_loc[i];
2874 substed_operand[i] = recog_data.operand[i];
2875
2876 /* Address operands are reloaded in their existing mode,
2877 no matter what is specified in the machine description. */
2878 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2879
2880 /* If the address is a single CONST_INT pick address mode
2881 instead otherwise we will later not know in which mode
2882 the reload should be performed. */
2883 if (operand_mode[i] == VOIDmode)
2884 operand_mode[i] = Pmode;
2885
2886 }
2887 else if (code == MEM)
2888 {
2889 address_reloaded[i]
2890 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2891 recog_data.operand_loc[i],
2892 XEXP (recog_data.operand[i], 0),
2893 &XEXP (recog_data.operand[i], 0),
2894 i, address_type[i], ind_levels, insn);
2895 recog_data.operand[i] = *recog_data.operand_loc[i];
2896 substed_operand[i] = recog_data.operand[i];
2897 }
2898 else if (code == SUBREG)
2899 {
2900 rtx reg = SUBREG_REG (recog_data.operand[i]);
2901 rtx op
2902 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2903 ind_levels,
2904 set != 0
2905 && &SET_DEST (set) == recog_data.operand_loc[i],
2906 insn,
2907 &address_reloaded[i]);
2908
2909 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2910 that didn't get a hard register, emit a USE with a REG_EQUAL
2911 note in front so that we might inherit a previous, possibly
2912 wider reload. */
2913
2914 if (replace
2915 && MEM_P (op)
2916 && REG_P (reg)
2917 && (GET_MODE_SIZE (GET_MODE (reg))
2918 >= GET_MODE_SIZE (GET_MODE (op)))
2919 && reg_equiv_constant (REGNO (reg)) == 0)
2920 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2921 insn),
2922 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2923
2924 substed_operand[i] = recog_data.operand[i] = op;
2925 }
2926 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2927 /* We can get a PLUS as an "operand" as a result of register
2928 elimination. See eliminate_regs and gen_reload. We handle
2929 a unary operator by reloading the operand. */
2930 substed_operand[i] = recog_data.operand[i]
2931 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2932 ind_levels, 0, insn,
2933 &address_reloaded[i]);
2934 else if (code == REG)
2935 {
2936 /* This is equivalent to calling find_reloads_toplev.
2937 The code is duplicated for speed.
2938 When we find a pseudo always equivalent to a constant,
2939 we replace it by the constant. We must be sure, however,
2940 that we don't try to replace it in the insn in which it
2941 is being set. */
2942 int regno = REGNO (recog_data.operand[i]);
2943 if (reg_equiv_constant (regno) != 0
2944 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2945 {
2946 /* Record the existing mode so that the check if constants are
2947 allowed will work when operand_mode isn't specified. */
2948
2949 if (operand_mode[i] == VOIDmode)
2950 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2951
2952 substed_operand[i] = recog_data.operand[i]
2953 = reg_equiv_constant (regno);
2954 }
2955 if (reg_equiv_memory_loc (regno) != 0
2956 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2957 /* We need not give a valid is_set_dest argument since the case
2958 of a constant equivalence was checked above. */
2959 substed_operand[i] = recog_data.operand[i]
2960 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2961 ind_levels, 0, insn,
2962 &address_reloaded[i]);
2963 }
2964 /* If the operand is still a register (we didn't replace it with an
2965 equivalent), get the preferred class to reload it into. */
2966 code = GET_CODE (recog_data.operand[i]);
2967 preferred_class[i]
2968 = ((code == REG && REGNO (recog_data.operand[i])
2969 >= FIRST_PSEUDO_REGISTER)
2970 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2971 : NO_REGS);
2972 pref_or_nothing[i]
2973 = (code == REG
2974 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2975 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2976 }
2977
2978 /* If this is simply a copy from operand 1 to operand 0, merge the
2979 preferred classes for the operands. */
2980 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2981 && recog_data.operand[1] == SET_SRC (set))
2982 {
2983 preferred_class[0] = preferred_class[1]
2984 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2985 pref_or_nothing[0] |= pref_or_nothing[1];
2986 pref_or_nothing[1] |= pref_or_nothing[0];
2987 }
2988
2989 /* Now see what we need for pseudo-regs that didn't get hard regs
2990 or got the wrong kind of hard reg. For this, we must consider
2991 all the operands together against the register constraints. */
2992
2993 best = MAX_RECOG_OPERANDS * 2 + 600;
2994
2995 goal_alternative_swapped = 0;
2996
2997 /* The constraints are made of several alternatives.
2998 Each operand's constraint looks like foo,bar,... with commas
2999 separating the alternatives. The first alternatives for all
3000 operands go together, the second alternatives go together, etc.
3001
3002 First loop over alternatives. */
3003
3004 alternative_mask enabled = get_enabled_alternatives (insn);
3005 for (this_alternative_number = 0;
3006 this_alternative_number < n_alternatives;
3007 this_alternative_number++)
3008 {
3009 int swapped;
3010
3011 if (!TEST_BIT (enabled, this_alternative_number))
3012 {
3013 int i;
3014
3015 for (i = 0; i < recog_data.n_operands; i++)
3016 constraints[i] = skip_alternative (constraints[i]);
3017
3018 continue;
3019 }
3020
3021 /* If insn is commutative (it's safe to exchange a certain pair
3022 of operands) then we need to try each alternative twice, the
3023 second time matching those two operands as if we had
3024 exchanged them. To do this, really exchange them in
3025 operands. */
3026 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3027 {
3028 /* Loop over operands for one constraint alternative. */
3029 /* LOSERS counts those that don't fit this alternative
3030 and would require loading. */
3031 int losers = 0;
3032 /* BAD is set to 1 if it some operand can't fit this alternative
3033 even after reloading. */
3034 int bad = 0;
3035 /* REJECT is a count of how undesirable this alternative says it is
3036 if any reloading is required. If the alternative matches exactly
3037 then REJECT is ignored, but otherwise it gets this much
3038 counted against it in addition to the reloading needed. Each
3039 ? counts three times here since we want the disparaging caused by
3040 a bad register class to only count 1/3 as much. */
3041 int reject = 0;
3042
3043 if (swapped)
3044 {
3045 recog_data.operand[commutative] = substed_operand[commutative + 1];
3046 recog_data.operand[commutative + 1] = substed_operand[commutative];
3047 /* Swap the duplicates too. */
3048 for (i = 0; i < recog_data.n_dups; i++)
3049 if (recog_data.dup_num[i] == commutative
3050 || recog_data.dup_num[i] == commutative + 1)
3051 *recog_data.dup_loc[i]
3052 = recog_data.operand[(int) recog_data.dup_num[i]];
3053
3054 std::swap (preferred_class[commutative],
3055 preferred_class[commutative + 1]);
3056 std::swap (pref_or_nothing[commutative],
3057 pref_or_nothing[commutative + 1]);
3058 std::swap (address_reloaded[commutative],
3059 address_reloaded[commutative + 1]);
3060 }
3061
3062 this_earlyclobber = 0;
3063
3064 for (i = 0; i < noperands; i++)
3065 {
3066 const char *p = constraints[i];
3067 char *end;
3068 int len;
3069 int win = 0;
3070 int did_match = 0;
3071 /* 0 => this operand can be reloaded somehow for this alternative. */
3072 int badop = 1;
3073 /* 0 => this operand can be reloaded if the alternative allows regs. */
3074 int winreg = 0;
3075 int c;
3076 int m;
3077 rtx operand = recog_data.operand[i];
3078 int offset = 0;
3079 /* Nonzero means this is a MEM that must be reloaded into a reg
3080 regardless of what the constraint says. */
3081 int force_reload = 0;
3082 int offmemok = 0;
3083 /* Nonzero if a constant forced into memory would be OK for this
3084 operand. */
3085 int constmemok = 0;
3086 int earlyclobber = 0;
3087 enum constraint_num cn;
3088 enum reg_class cl;
3089
3090 /* If the predicate accepts a unary operator, it means that
3091 we need to reload the operand, but do not do this for
3092 match_operator and friends. */
3093 if (UNARY_P (operand) && *p != 0)
3094 operand = XEXP (operand, 0);
3095
3096 /* If the operand is a SUBREG, extract
3097 the REG or MEM (or maybe even a constant) within.
3098 (Constants can occur as a result of reg_equiv_constant.) */
3099
3100 while (GET_CODE (operand) == SUBREG)
3101 {
3102 /* Offset only matters when operand is a REG and
3103 it is a hard reg. This is because it is passed
3104 to reg_fits_class_p if it is a REG and all pseudos
3105 return 0 from that function. */
3106 if (REG_P (SUBREG_REG (operand))
3107 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3108 {
3109 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3110 GET_MODE (SUBREG_REG (operand)),
3111 SUBREG_BYTE (operand),
3112 GET_MODE (operand)) < 0)
3113 force_reload = 1;
3114 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3115 GET_MODE (SUBREG_REG (operand)),
3116 SUBREG_BYTE (operand),
3117 GET_MODE (operand));
3118 }
3119 operand = SUBREG_REG (operand);
3120 /* Force reload if this is a constant or PLUS or if there may
3121 be a problem accessing OPERAND in the outer mode. */
3122 if (CONSTANT_P (operand)
3123 || GET_CODE (operand) == PLUS
3124 /* We must force a reload of paradoxical SUBREGs
3125 of a MEM because the alignment of the inner value
3126 may not be enough to do the outer reference. On
3127 big-endian machines, it may also reference outside
3128 the object.
3129
3130 On machines that extend byte operations and we have a
3131 SUBREG where both the inner and outer modes are no wider
3132 than a word and the inner mode is narrower, is integral,
3133 and gets extended when loaded from memory, combine.c has
3134 made assumptions about the behavior of the machine in such
3135 register access. If the data is, in fact, in memory we
3136 must always load using the size assumed to be in the
3137 register and let the insn do the different-sized
3138 accesses.
3139
3140 This is doubly true if WORD_REGISTER_OPERATIONS. In
3141 this case eliminate_regs has left non-paradoxical
3142 subregs for push_reload to see. Make sure it does
3143 by forcing the reload.
3144
3145 ??? When is it right at this stage to have a subreg
3146 of a mem that is _not_ to be handled specially? IMO
3147 those should have been reduced to just a mem. */
3148 || ((MEM_P (operand)
3149 || (REG_P (operand)
3150 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3151 #if !WORD_REGISTER_OPERATIONS
3152 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3153 < BIGGEST_ALIGNMENT)
3154 && (GET_MODE_SIZE (operand_mode[i])
3155 > GET_MODE_SIZE (GET_MODE (operand))))
3156 || BYTES_BIG_ENDIAN
3157 #ifdef LOAD_EXTEND_OP
3158 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3159 && (GET_MODE_SIZE (GET_MODE (operand))
3160 <= UNITS_PER_WORD)
3161 && (GET_MODE_SIZE (operand_mode[i])
3162 > GET_MODE_SIZE (GET_MODE (operand)))
3163 && INTEGRAL_MODE_P (GET_MODE (operand))
3164 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3165 #endif
3166 )
3167 #endif
3168 )
3169 )
3170 force_reload = 1;
3171 }
3172
3173 this_alternative[i] = NO_REGS;
3174 this_alternative_win[i] = 0;
3175 this_alternative_match_win[i] = 0;
3176 this_alternative_offmemok[i] = 0;
3177 this_alternative_earlyclobber[i] = 0;
3178 this_alternative_matches[i] = -1;
3179
3180 /* An empty constraint or empty alternative
3181 allows anything which matched the pattern. */
3182 if (*p == 0 || *p == ',')
3183 win = 1, badop = 0;
3184
3185 /* Scan this alternative's specs for this operand;
3186 set WIN if the operand fits any letter in this alternative.
3187 Otherwise, clear BADOP if this operand could
3188 fit some letter after reloads,
3189 or set WINREG if this operand could fit after reloads
3190 provided the constraint allows some registers. */
3191
3192 do
3193 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3194 {
3195 case '\0':
3196 len = 0;
3197 break;
3198 case ',':
3199 c = '\0';
3200 break;
3201
3202 case '?':
3203 reject += 6;
3204 break;
3205
3206 case '!':
3207 reject = 600;
3208 break;
3209
3210 case '#':
3211 /* Ignore rest of this alternative as far as
3212 reloading is concerned. */
3213 do
3214 p++;
3215 while (*p && *p != ',');
3216 len = 0;
3217 break;
3218
3219 case '0': case '1': case '2': case '3': case '4':
3220 case '5': case '6': case '7': case '8': case '9':
3221 m = strtoul (p, &end, 10);
3222 p = end;
3223 len = 0;
3224
3225 this_alternative_matches[i] = m;
3226 /* We are supposed to match a previous operand.
3227 If we do, we win if that one did.
3228 If we do not, count both of the operands as losers.
3229 (This is too conservative, since most of the time
3230 only a single reload insn will be needed to make
3231 the two operands win. As a result, this alternative
3232 may be rejected when it is actually desirable.) */
3233 if ((swapped && (m != commutative || i != commutative + 1))
3234 /* If we are matching as if two operands were swapped,
3235 also pretend that operands_match had been computed
3236 with swapped.
3237 But if I is the second of those and C is the first,
3238 don't exchange them, because operands_match is valid
3239 only on one side of its diagonal. */
3240 ? (operands_match
3241 [(m == commutative || m == commutative + 1)
3242 ? 2 * commutative + 1 - m : m]
3243 [(i == commutative || i == commutative + 1)
3244 ? 2 * commutative + 1 - i : i])
3245 : operands_match[m][i])
3246 {
3247 /* If we are matching a non-offsettable address where an
3248 offsettable address was expected, then we must reject
3249 this combination, because we can't reload it. */
3250 if (this_alternative_offmemok[m]
3251 && MEM_P (recog_data.operand[m])
3252 && this_alternative[m] == NO_REGS
3253 && ! this_alternative_win[m])
3254 bad = 1;
3255
3256 did_match = this_alternative_win[m];
3257 }
3258 else
3259 {
3260 /* Operands don't match. */
3261 rtx value;
3262 int loc1, loc2;
3263 /* Retroactively mark the operand we had to match
3264 as a loser, if it wasn't already. */
3265 if (this_alternative_win[m])
3266 losers++;
3267 this_alternative_win[m] = 0;
3268 if (this_alternative[m] == NO_REGS)
3269 bad = 1;
3270 /* But count the pair only once in the total badness of
3271 this alternative, if the pair can be a dummy reload.
3272 The pointers in operand_loc are not swapped; swap
3273 them by hand if necessary. */
3274 if (swapped && i == commutative)
3275 loc1 = commutative + 1;
3276 else if (swapped && i == commutative + 1)
3277 loc1 = commutative;
3278 else
3279 loc1 = i;
3280 if (swapped && m == commutative)
3281 loc2 = commutative + 1;
3282 else if (swapped && m == commutative + 1)
3283 loc2 = commutative;
3284 else
3285 loc2 = m;
3286 value
3287 = find_dummy_reload (recog_data.operand[i],
3288 recog_data.operand[m],
3289 recog_data.operand_loc[loc1],
3290 recog_data.operand_loc[loc2],
3291 operand_mode[i], operand_mode[m],
3292 this_alternative[m], -1,
3293 this_alternative_earlyclobber[m]);
3294
3295 if (value != 0)
3296 losers--;
3297 }
3298 /* This can be fixed with reloads if the operand
3299 we are supposed to match can be fixed with reloads. */
3300 badop = 0;
3301 this_alternative[i] = this_alternative[m];
3302
3303 /* If we have to reload this operand and some previous
3304 operand also had to match the same thing as this
3305 operand, we don't know how to do that. So reject this
3306 alternative. */
3307 if (! did_match || force_reload)
3308 for (j = 0; j < i; j++)
3309 if (this_alternative_matches[j]
3310 == this_alternative_matches[i])
3311 {
3312 badop = 1;
3313 break;
3314 }
3315 break;
3316
3317 case 'p':
3318 /* All necessary reloads for an address_operand
3319 were handled in find_reloads_address. */
3320 this_alternative[i]
3321 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3322 ADDRESS, SCRATCH);
3323 win = 1;
3324 badop = 0;
3325 break;
3326
3327 case TARGET_MEM_CONSTRAINT:
3328 if (force_reload)
3329 break;
3330 if (MEM_P (operand)
3331 || (REG_P (operand)
3332 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3333 && reg_renumber[REGNO (operand)] < 0))
3334 win = 1;
3335 if (CONST_POOL_OK_P (operand_mode[i], operand))
3336 badop = 0;
3337 constmemok = 1;
3338 break;
3339
3340 case '<':
3341 if (MEM_P (operand)
3342 && ! address_reloaded[i]
3343 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3344 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3345 win = 1;
3346 break;
3347
3348 case '>':
3349 if (MEM_P (operand)
3350 && ! address_reloaded[i]
3351 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3352 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3353 win = 1;
3354 break;
3355
3356 /* Memory operand whose address is not offsettable. */
3357 case 'V':
3358 if (force_reload)
3359 break;
3360 if (MEM_P (operand)
3361 && ! (ind_levels ? offsettable_memref_p (operand)
3362 : offsettable_nonstrict_memref_p (operand))
3363 /* Certain mem addresses will become offsettable
3364 after they themselves are reloaded. This is important;
3365 we don't want our own handling of unoffsettables
3366 to override the handling of reg_equiv_address. */
3367 && !(REG_P (XEXP (operand, 0))
3368 && (ind_levels == 0
3369 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3370 win = 1;
3371 break;
3372
3373 /* Memory operand whose address is offsettable. */
3374 case 'o':
3375 if (force_reload)
3376 break;
3377 if ((MEM_P (operand)
3378 /* If IND_LEVELS, find_reloads_address won't reload a
3379 pseudo that didn't get a hard reg, so we have to
3380 reject that case. */
3381 && ((ind_levels ? offsettable_memref_p (operand)
3382 : offsettable_nonstrict_memref_p (operand))
3383 /* A reloaded address is offsettable because it is now
3384 just a simple register indirect. */
3385 || address_reloaded[i] == 1))
3386 || (REG_P (operand)
3387 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3388 && reg_renumber[REGNO (operand)] < 0
3389 /* If reg_equiv_address is nonzero, we will be
3390 loading it into a register; hence it will be
3391 offsettable, but we cannot say that reg_equiv_mem
3392 is offsettable without checking. */
3393 && ((reg_equiv_mem (REGNO (operand)) != 0
3394 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3395 || (reg_equiv_address (REGNO (operand)) != 0))))
3396 win = 1;
3397 if (CONST_POOL_OK_P (operand_mode[i], operand)
3398 || MEM_P (operand))
3399 badop = 0;
3400 constmemok = 1;
3401 offmemok = 1;
3402 break;
3403
3404 case '&':
3405 /* Output operand that is stored before the need for the
3406 input operands (and their index registers) is over. */
3407 earlyclobber = 1, this_earlyclobber = 1;
3408 break;
3409
3410 case 'X':
3411 force_reload = 0;
3412 win = 1;
3413 break;
3414
3415 case 'g':
3416 if (! force_reload
3417 /* A PLUS is never a valid operand, but reload can make
3418 it from a register when eliminating registers. */
3419 && GET_CODE (operand) != PLUS
3420 /* A SCRATCH is not a valid operand. */
3421 && GET_CODE (operand) != SCRATCH
3422 && (! CONSTANT_P (operand)
3423 || ! flag_pic
3424 || LEGITIMATE_PIC_OPERAND_P (operand))
3425 && (GENERAL_REGS == ALL_REGS
3426 || !REG_P (operand)
3427 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3428 && reg_renumber[REGNO (operand)] < 0)))
3429 win = 1;
3430 cl = GENERAL_REGS;
3431 goto reg;
3432
3433 default:
3434 cn = lookup_constraint (p);
3435 switch (get_constraint_type (cn))
3436 {
3437 case CT_REGISTER:
3438 cl = reg_class_for_constraint (cn);
3439 if (cl != NO_REGS)
3440 goto reg;
3441 break;
3442
3443 case CT_CONST_INT:
3444 if (CONST_INT_P (operand)
3445 && (insn_const_int_ok_for_constraint
3446 (INTVAL (operand), cn)))
3447 win = true;
3448 break;
3449
3450 case CT_MEMORY:
3451 if (force_reload)
3452 break;
3453 if (constraint_satisfied_p (operand, cn))
3454 win = 1;
3455 /* If the address was already reloaded,
3456 we win as well. */
3457 else if (MEM_P (operand) && address_reloaded[i] == 1)
3458 win = 1;
3459 /* Likewise if the address will be reloaded because
3460 reg_equiv_address is nonzero. For reg_equiv_mem
3461 we have to check. */
3462 else if (REG_P (operand)
3463 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3464 && reg_renumber[REGNO (operand)] < 0
3465 && ((reg_equiv_mem (REGNO (operand)) != 0
3466 && (constraint_satisfied_p
3467 (reg_equiv_mem (REGNO (operand)),
3468 cn)))
3469 || (reg_equiv_address (REGNO (operand))
3470 != 0)))
3471 win = 1;
3472
3473 /* If we didn't already win, we can reload
3474 constants via force_const_mem, and other
3475 MEMs by reloading the address like for 'o'. */
3476 if (CONST_POOL_OK_P (operand_mode[i], operand)
3477 || MEM_P (operand))
3478 badop = 0;
3479 constmemok = 1;
3480 offmemok = 1;
3481 break;
3482
3483 case CT_ADDRESS:
3484 if (constraint_satisfied_p (operand, cn))
3485 win = 1;
3486
3487 /* If we didn't already win, we can reload
3488 the address into a base register. */
3489 this_alternative[i]
3490 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3491 ADDRESS, SCRATCH);
3492 badop = 0;
3493 break;
3494
3495 case CT_FIXED_FORM:
3496 if (constraint_satisfied_p (operand, cn))
3497 win = 1;
3498 break;
3499 }
3500 break;
3501
3502 reg:
3503 this_alternative[i]
3504 = reg_class_subunion[this_alternative[i]][cl];
3505 if (GET_MODE (operand) == BLKmode)
3506 break;
3507 winreg = 1;
3508 if (REG_P (operand)
3509 && reg_fits_class_p (operand, this_alternative[i],
3510 offset, GET_MODE (recog_data.operand[i])))
3511 win = 1;
3512 break;
3513 }
3514 while ((p += len), c);
3515
3516 if (swapped == (commutative >= 0 ? 1 : 0))
3517 constraints[i] = p;
3518
3519 /* If this operand could be handled with a reg,
3520 and some reg is allowed, then this operand can be handled. */
3521 if (winreg && this_alternative[i] != NO_REGS
3522 && (win || !class_only_fixed_regs[this_alternative[i]]))
3523 badop = 0;
3524
3525 /* Record which operands fit this alternative. */
3526 this_alternative_earlyclobber[i] = earlyclobber;
3527 if (win && ! force_reload)
3528 this_alternative_win[i] = 1;
3529 else if (did_match && ! force_reload)
3530 this_alternative_match_win[i] = 1;
3531 else
3532 {
3533 int const_to_mem = 0;
3534
3535 this_alternative_offmemok[i] = offmemok;
3536 losers++;
3537 if (badop)
3538 bad = 1;
3539 /* Alternative loses if it has no regs for a reg operand. */
3540 if (REG_P (operand)
3541 && this_alternative[i] == NO_REGS
3542 && this_alternative_matches[i] < 0)
3543 bad = 1;
3544
3545 /* If this is a constant that is reloaded into the desired
3546 class by copying it to memory first, count that as another
3547 reload. This is consistent with other code and is
3548 required to avoid choosing another alternative when
3549 the constant is moved into memory by this function on
3550 an early reload pass. Note that the test here is
3551 precisely the same as in the code below that calls
3552 force_const_mem. */
3553 if (CONST_POOL_OK_P (operand_mode[i], operand)
3554 && ((targetm.preferred_reload_class (operand,
3555 this_alternative[i])
3556 == NO_REGS)
3557 || no_input_reloads))
3558 {
3559 const_to_mem = 1;
3560 if (this_alternative[i] != NO_REGS)
3561 losers++;
3562 }
3563
3564 /* Alternative loses if it requires a type of reload not
3565 permitted for this insn. We can always reload SCRATCH
3566 and objects with a REG_UNUSED note. */
3567 if (GET_CODE (operand) != SCRATCH
3568 && modified[i] != RELOAD_READ && no_output_reloads
3569 && ! find_reg_note (insn, REG_UNUSED, operand))
3570 bad = 1;
3571 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3572 && ! const_to_mem)
3573 bad = 1;
3574
3575 /* If we can't reload this value at all, reject this
3576 alternative. Note that we could also lose due to
3577 LIMIT_RELOAD_CLASS, but we don't check that
3578 here. */
3579
3580 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3581 {
3582 if (targetm.preferred_reload_class (operand,
3583 this_alternative[i])
3584 == NO_REGS)
3585 reject = 600;
3586
3587 if (operand_type[i] == RELOAD_FOR_OUTPUT
3588 && (targetm.preferred_output_reload_class (operand,
3589 this_alternative[i])
3590 == NO_REGS))
3591 reject = 600;
3592 }
3593
3594 /* We prefer to reload pseudos over reloading other things,
3595 since such reloads may be able to be eliminated later.
3596 If we are reloading a SCRATCH, we won't be generating any
3597 insns, just using a register, so it is also preferred.
3598 So bump REJECT in other cases. Don't do this in the
3599 case where we are forcing a constant into memory and
3600 it will then win since we don't want to have a different
3601 alternative match then. */
3602 if (! (REG_P (operand)
3603 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3604 && GET_CODE (operand) != SCRATCH
3605 && ! (const_to_mem && constmemok))
3606 reject += 2;
3607
3608 /* Input reloads can be inherited more often than output
3609 reloads can be removed, so penalize output reloads. */
3610 if (operand_type[i] != RELOAD_FOR_INPUT
3611 && GET_CODE (operand) != SCRATCH)
3612 reject++;
3613 }
3614
3615 /* If this operand is a pseudo register that didn't get
3616 a hard reg and this alternative accepts some
3617 register, see if the class that we want is a subset
3618 of the preferred class for this register. If not,
3619 but it intersects that class, use the preferred class
3620 instead. If it does not intersect the preferred
3621 class, show that usage of this alternative should be
3622 discouraged; it will be discouraged more still if the
3623 register is `preferred or nothing'. We do this
3624 because it increases the chance of reusing our spill
3625 register in a later insn and avoiding a pair of
3626 memory stores and loads.
3627
3628 Don't bother with this if this alternative will
3629 accept this operand.
3630
3631 Don't do this for a multiword operand, since it is
3632 only a small win and has the risk of requiring more
3633 spill registers, which could cause a large loss.
3634
3635 Don't do this if the preferred class has only one
3636 register because we might otherwise exhaust the
3637 class. */
3638
3639 if (! win && ! did_match
3640 && this_alternative[i] != NO_REGS
3641 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3642 && reg_class_size [(int) preferred_class[i]] > 0
3643 && ! small_register_class_p (preferred_class[i]))
3644 {
3645 if (! reg_class_subset_p (this_alternative[i],
3646 preferred_class[i]))
3647 {
3648 /* Since we don't have a way of forming the intersection,
3649 we just do something special if the preferred class
3650 is a subset of the class we have; that's the most
3651 common case anyway. */
3652 if (reg_class_subset_p (preferred_class[i],
3653 this_alternative[i]))
3654 this_alternative[i] = preferred_class[i];
3655 else
3656 reject += (2 + 2 * pref_or_nothing[i]);
3657 }
3658 }
3659 }
3660
3661 /* Now see if any output operands that are marked "earlyclobber"
3662 in this alternative conflict with any input operands
3663 or any memory addresses. */
3664
3665 for (i = 0; i < noperands; i++)
3666 if (this_alternative_earlyclobber[i]
3667 && (this_alternative_win[i] || this_alternative_match_win[i]))
3668 {
3669 struct decomposition early_data;
3670
3671 early_data = decompose (recog_data.operand[i]);
3672
3673 gcc_assert (modified[i] != RELOAD_READ);
3674
3675 if (this_alternative[i] == NO_REGS)
3676 {
3677 this_alternative_earlyclobber[i] = 0;
3678 gcc_assert (this_insn_is_asm);
3679 error_for_asm (this_insn,
3680 "%<&%> constraint used with no register class");
3681 }
3682
3683 for (j = 0; j < noperands; j++)
3684 /* Is this an input operand or a memory ref? */
3685 if ((MEM_P (recog_data.operand[j])
3686 || modified[j] != RELOAD_WRITE)
3687 && j != i
3688 /* Ignore things like match_operator operands. */
3689 && !recog_data.is_operator[j]
3690 /* Don't count an input operand that is constrained to match
3691 the early clobber operand. */
3692 && ! (this_alternative_matches[j] == i
3693 && rtx_equal_p (recog_data.operand[i],
3694 recog_data.operand[j]))
3695 /* Is it altered by storing the earlyclobber operand? */
3696 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3697 early_data))
3698 {
3699 /* If the output is in a non-empty few-regs class,
3700 it's costly to reload it, so reload the input instead. */
3701 if (small_register_class_p (this_alternative[i])
3702 && (REG_P (recog_data.operand[j])
3703 || GET_CODE (recog_data.operand[j]) == SUBREG))
3704 {
3705 losers++;
3706 this_alternative_win[j] = 0;
3707 this_alternative_match_win[j] = 0;
3708 }
3709 else
3710 break;
3711 }
3712 /* If an earlyclobber operand conflicts with something,
3713 it must be reloaded, so request this and count the cost. */
3714 if (j != noperands)
3715 {
3716 losers++;
3717 this_alternative_win[i] = 0;
3718 this_alternative_match_win[j] = 0;
3719 for (j = 0; j < noperands; j++)
3720 if (this_alternative_matches[j] == i
3721 && this_alternative_match_win[j])
3722 {
3723 this_alternative_win[j] = 0;
3724 this_alternative_match_win[j] = 0;
3725 losers++;
3726 }
3727 }
3728 }
3729
3730 /* If one alternative accepts all the operands, no reload required,
3731 choose that alternative; don't consider the remaining ones. */
3732 if (losers == 0)
3733 {
3734 /* Unswap these so that they are never swapped at `finish'. */
3735 if (swapped)
3736 {
3737 recog_data.operand[commutative] = substed_operand[commutative];
3738 recog_data.operand[commutative + 1]
3739 = substed_operand[commutative + 1];
3740 }
3741 for (i = 0; i < noperands; i++)
3742 {
3743 goal_alternative_win[i] = this_alternative_win[i];
3744 goal_alternative_match_win[i] = this_alternative_match_win[i];
3745 goal_alternative[i] = this_alternative[i];
3746 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3747 goal_alternative_matches[i] = this_alternative_matches[i];
3748 goal_alternative_earlyclobber[i]
3749 = this_alternative_earlyclobber[i];
3750 }
3751 goal_alternative_number = this_alternative_number;
3752 goal_alternative_swapped = swapped;
3753 goal_earlyclobber = this_earlyclobber;
3754 goto finish;
3755 }
3756
3757 /* REJECT, set by the ! and ? constraint characters and when a register
3758 would be reloaded into a non-preferred class, discourages the use of
3759 this alternative for a reload goal. REJECT is incremented by six
3760 for each ? and two for each non-preferred class. */
3761 losers = losers * 6 + reject;
3762
3763 /* If this alternative can be made to work by reloading,
3764 and it needs less reloading than the others checked so far,
3765 record it as the chosen goal for reloading. */
3766 if (! bad)
3767 {
3768 if (best > losers)
3769 {
3770 for (i = 0; i < noperands; i++)
3771 {
3772 goal_alternative[i] = this_alternative[i];
3773 goal_alternative_win[i] = this_alternative_win[i];
3774 goal_alternative_match_win[i]
3775 = this_alternative_match_win[i];
3776 goal_alternative_offmemok[i]
3777 = this_alternative_offmemok[i];
3778 goal_alternative_matches[i] = this_alternative_matches[i];
3779 goal_alternative_earlyclobber[i]
3780 = this_alternative_earlyclobber[i];
3781 }
3782 goal_alternative_swapped = swapped;
3783 best = losers;
3784 goal_alternative_number = this_alternative_number;
3785 goal_earlyclobber = this_earlyclobber;
3786 }
3787 }
3788
3789 if (swapped)
3790 {
3791 /* If the commutative operands have been swapped, swap
3792 them back in order to check the next alternative. */
3793 recog_data.operand[commutative] = substed_operand[commutative];
3794 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3795 /* Unswap the duplicates too. */
3796 for (i = 0; i < recog_data.n_dups; i++)
3797 if (recog_data.dup_num[i] == commutative
3798 || recog_data.dup_num[i] == commutative + 1)
3799 *recog_data.dup_loc[i]
3800 = recog_data.operand[(int) recog_data.dup_num[i]];
3801
3802 /* Unswap the operand related information as well. */
3803 std::swap (preferred_class[commutative],
3804 preferred_class[commutative + 1]);
3805 std::swap (pref_or_nothing[commutative],
3806 pref_or_nothing[commutative + 1]);
3807 std::swap (address_reloaded[commutative],
3808 address_reloaded[commutative + 1]);
3809 }
3810 }
3811 }
3812
3813 /* The operands don't meet the constraints.
3814 goal_alternative describes the alternative
3815 that we could reach by reloading the fewest operands.
3816 Reload so as to fit it. */
3817
3818 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3819 {
3820 /* No alternative works with reloads?? */
3821 if (insn_code_number >= 0)
3822 fatal_insn ("unable to generate reloads for:", insn);
3823 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3824 /* Avoid further trouble with this insn. */
3825 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3826 n_reloads = 0;
3827 return 0;
3828 }
3829
3830 /* Jump to `finish' from above if all operands are valid already.
3831 In that case, goal_alternative_win is all 1. */
3832 finish:
3833
3834 /* Right now, for any pair of operands I and J that are required to match,
3835 with I < J,
3836 goal_alternative_matches[J] is I.
3837 Set up goal_alternative_matched as the inverse function:
3838 goal_alternative_matched[I] = J. */
3839
3840 for (i = 0; i < noperands; i++)
3841 goal_alternative_matched[i] = -1;
3842
3843 for (i = 0; i < noperands; i++)
3844 if (! goal_alternative_win[i]
3845 && goal_alternative_matches[i] >= 0)
3846 goal_alternative_matched[goal_alternative_matches[i]] = i;
3847
3848 for (i = 0; i < noperands; i++)
3849 goal_alternative_win[i] |= goal_alternative_match_win[i];
3850
3851 /* If the best alternative is with operands 1 and 2 swapped,
3852 consider them swapped before reporting the reloads. Update the
3853 operand numbers of any reloads already pushed. */
3854
3855 if (goal_alternative_swapped)
3856 {
3857 std::swap (substed_operand[commutative],
3858 substed_operand[commutative + 1]);
3859 std::swap (recog_data.operand[commutative],
3860 recog_data.operand[commutative + 1]);
3861 std::swap (*recog_data.operand_loc[commutative],
3862 *recog_data.operand_loc[commutative + 1]);
3863
3864 for (i = 0; i < recog_data.n_dups; i++)
3865 if (recog_data.dup_num[i] == commutative
3866 || recog_data.dup_num[i] == commutative + 1)
3867 *recog_data.dup_loc[i]
3868 = recog_data.operand[(int) recog_data.dup_num[i]];
3869
3870 for (i = 0; i < n_reloads; i++)
3871 {
3872 if (rld[i].opnum == commutative)
3873 rld[i].opnum = commutative + 1;
3874 else if (rld[i].opnum == commutative + 1)
3875 rld[i].opnum = commutative;
3876 }
3877 }
3878
3879 for (i = 0; i < noperands; i++)
3880 {
3881 operand_reloadnum[i] = -1;
3882
3883 /* If this is an earlyclobber operand, we need to widen the scope.
3884 The reload must remain valid from the start of the insn being
3885 reloaded until after the operand is stored into its destination.
3886 We approximate this with RELOAD_OTHER even though we know that we
3887 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3888
3889 One special case that is worth checking is when we have an
3890 output that is earlyclobber but isn't used past the insn (typically
3891 a SCRATCH). In this case, we only need have the reload live
3892 through the insn itself, but not for any of our input or output
3893 reloads.
3894 But we must not accidentally narrow the scope of an existing
3895 RELOAD_OTHER reload - leave these alone.
3896
3897 In any case, anything needed to address this operand can remain
3898 however they were previously categorized. */
3899
3900 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3901 operand_type[i]
3902 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3903 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3904 }
3905
3906 /* Any constants that aren't allowed and can't be reloaded
3907 into registers are here changed into memory references. */
3908 for (i = 0; i < noperands; i++)
3909 if (! goal_alternative_win[i])
3910 {
3911 rtx op = recog_data.operand[i];
3912 rtx subreg = NULL_RTX;
3913 rtx plus = NULL_RTX;
3914 machine_mode mode = operand_mode[i];
3915
3916 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3917 push_reload so we have to let them pass here. */
3918 if (GET_CODE (op) == SUBREG)
3919 {
3920 subreg = op;
3921 op = SUBREG_REG (op);
3922 mode = GET_MODE (op);
3923 }
3924
3925 if (GET_CODE (op) == PLUS)
3926 {
3927 plus = op;
3928 op = XEXP (op, 1);
3929 }
3930
3931 if (CONST_POOL_OK_P (mode, op)
3932 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3933 == NO_REGS)
3934 || no_input_reloads))
3935 {
3936 int this_address_reloaded;
3937 rtx tem = force_const_mem (mode, op);
3938
3939 /* If we stripped a SUBREG or a PLUS above add it back. */
3940 if (plus != NULL_RTX)
3941 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3942
3943 if (subreg != NULL_RTX)
3944 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3945
3946 this_address_reloaded = 0;
3947 substed_operand[i] = recog_data.operand[i]
3948 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3949 0, insn, &this_address_reloaded);
3950
3951 /* If the alternative accepts constant pool refs directly
3952 there will be no reload needed at all. */
3953 if (plus == NULL_RTX
3954 && subreg == NULL_RTX
3955 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3956 ? substed_operand[i]
3957 : NULL,
3958 recog_data.constraints[i],
3959 goal_alternative_number))
3960 goal_alternative_win[i] = 1;
3961 }
3962 }
3963
3964 /* Record the values of the earlyclobber operands for the caller. */
3965 if (goal_earlyclobber)
3966 for (i = 0; i < noperands; i++)
3967 if (goal_alternative_earlyclobber[i])
3968 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3969
3970 /* Now record reloads for all the operands that need them. */
3971 for (i = 0; i < noperands; i++)
3972 if (! goal_alternative_win[i])
3973 {
3974 /* Operands that match previous ones have already been handled. */
3975 if (goal_alternative_matches[i] >= 0)
3976 ;
3977 /* Handle an operand with a nonoffsettable address
3978 appearing where an offsettable address will do
3979 by reloading the address into a base register.
3980
3981 ??? We can also do this when the operand is a register and
3982 reg_equiv_mem is not offsettable, but this is a bit tricky,
3983 so we don't bother with it. It may not be worth doing. */
3984 else if (goal_alternative_matched[i] == -1
3985 && goal_alternative_offmemok[i]
3986 && MEM_P (recog_data.operand[i]))
3987 {
3988 /* If the address to be reloaded is a VOIDmode constant,
3989 use the default address mode as mode of the reload register,
3990 as would have been done by find_reloads_address. */
3991 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3992 machine_mode address_mode;
3993
3994 address_mode = get_address_mode (recog_data.operand[i]);
3995 operand_reloadnum[i]
3996 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3997 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3998 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3999 address_mode,
4000 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
4001 rld[operand_reloadnum[i]].inc
4002 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4003
4004 /* If this operand is an output, we will have made any
4005 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4006 now we are treating part of the operand as an input, so
4007 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
4008
4009 if (modified[i] == RELOAD_WRITE)
4010 {
4011 for (j = 0; j < n_reloads; j++)
4012 {
4013 if (rld[j].opnum == i)
4014 {
4015 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4016 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4017 else if (rld[j].when_needed
4018 == RELOAD_FOR_OUTADDR_ADDRESS)
4019 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4020 }
4021 }
4022 }
4023 }
4024 else if (goal_alternative_matched[i] == -1)
4025 {
4026 operand_reloadnum[i]
4027 = push_reload ((modified[i] != RELOAD_WRITE
4028 ? recog_data.operand[i] : 0),
4029 (modified[i] != RELOAD_READ
4030 ? recog_data.operand[i] : 0),
4031 (modified[i] != RELOAD_WRITE
4032 ? recog_data.operand_loc[i] : 0),
4033 (modified[i] != RELOAD_READ
4034 ? recog_data.operand_loc[i] : 0),
4035 (enum reg_class) goal_alternative[i],
4036 (modified[i] == RELOAD_WRITE
4037 ? VOIDmode : operand_mode[i]),
4038 (modified[i] == RELOAD_READ
4039 ? VOIDmode : operand_mode[i]),
4040 (insn_code_number < 0 ? 0
4041 : insn_data[insn_code_number].operand[i].strict_low),
4042 0, i, operand_type[i]);
4043 }
4044 /* In a matching pair of operands, one must be input only
4045 and the other must be output only.
4046 Pass the input operand as IN and the other as OUT. */
4047 else if (modified[i] == RELOAD_READ
4048 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4049 {
4050 operand_reloadnum[i]
4051 = push_reload (recog_data.operand[i],
4052 recog_data.operand[goal_alternative_matched[i]],
4053 recog_data.operand_loc[i],
4054 recog_data.operand_loc[goal_alternative_matched[i]],
4055 (enum reg_class) goal_alternative[i],
4056 operand_mode[i],
4057 operand_mode[goal_alternative_matched[i]],
4058 0, 0, i, RELOAD_OTHER);
4059 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4060 }
4061 else if (modified[i] == RELOAD_WRITE
4062 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4063 {
4064 operand_reloadnum[goal_alternative_matched[i]]
4065 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4066 recog_data.operand[i],
4067 recog_data.operand_loc[goal_alternative_matched[i]],
4068 recog_data.operand_loc[i],
4069 (enum reg_class) goal_alternative[i],
4070 operand_mode[goal_alternative_matched[i]],
4071 operand_mode[i],
4072 0, 0, i, RELOAD_OTHER);
4073 operand_reloadnum[i] = output_reloadnum;
4074 }
4075 else
4076 {
4077 gcc_assert (insn_code_number < 0);
4078 error_for_asm (insn, "inconsistent operand constraints "
4079 "in an %<asm%>");
4080 /* Avoid further trouble with this insn. */
4081 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4082 n_reloads = 0;
4083 return 0;
4084 }
4085 }
4086 else if (goal_alternative_matched[i] < 0
4087 && goal_alternative_matches[i] < 0
4088 && address_operand_reloaded[i] != 1
4089 && optimize)
4090 {
4091 /* For each non-matching operand that's a MEM or a pseudo-register
4092 that didn't get a hard register, make an optional reload.
4093 This may get done even if the insn needs no reloads otherwise. */
4094
4095 rtx operand = recog_data.operand[i];
4096
4097 while (GET_CODE (operand) == SUBREG)
4098 operand = SUBREG_REG (operand);
4099 if ((MEM_P (operand)
4100 || (REG_P (operand)
4101 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4102 /* If this is only for an output, the optional reload would not
4103 actually cause us to use a register now, just note that
4104 something is stored here. */
4105 && (goal_alternative[i] != NO_REGS
4106 || modified[i] == RELOAD_WRITE)
4107 && ! no_input_reloads
4108 /* An optional output reload might allow to delete INSN later.
4109 We mustn't make in-out reloads on insns that are not permitted
4110 output reloads.
4111 If this is an asm, we can't delete it; we must not even call
4112 push_reload for an optional output reload in this case,
4113 because we can't be sure that the constraint allows a register,
4114 and push_reload verifies the constraints for asms. */
4115 && (modified[i] == RELOAD_READ
4116 || (! no_output_reloads && ! this_insn_is_asm)))
4117 operand_reloadnum[i]
4118 = push_reload ((modified[i] != RELOAD_WRITE
4119 ? recog_data.operand[i] : 0),
4120 (modified[i] != RELOAD_READ
4121 ? recog_data.operand[i] : 0),
4122 (modified[i] != RELOAD_WRITE
4123 ? recog_data.operand_loc[i] : 0),
4124 (modified[i] != RELOAD_READ
4125 ? recog_data.operand_loc[i] : 0),
4126 (enum reg_class) goal_alternative[i],
4127 (modified[i] == RELOAD_WRITE
4128 ? VOIDmode : operand_mode[i]),
4129 (modified[i] == RELOAD_READ
4130 ? VOIDmode : operand_mode[i]),
4131 (insn_code_number < 0 ? 0
4132 : insn_data[insn_code_number].operand[i].strict_low),
4133 1, i, operand_type[i]);
4134 /* If a memory reference remains (either as a MEM or a pseudo that
4135 did not get a hard register), yet we can't make an optional
4136 reload, check if this is actually a pseudo register reference;
4137 we then need to emit a USE and/or a CLOBBER so that reload
4138 inheritance will do the right thing. */
4139 else if (replace
4140 && (MEM_P (operand)
4141 || (REG_P (operand)
4142 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4143 && reg_renumber [REGNO (operand)] < 0)))
4144 {
4145 operand = *recog_data.operand_loc[i];
4146
4147 while (GET_CODE (operand) == SUBREG)
4148 operand = SUBREG_REG (operand);
4149 if (REG_P (operand))
4150 {
4151 if (modified[i] != RELOAD_WRITE)
4152 /* We mark the USE with QImode so that we recognize
4153 it as one that can be safely deleted at the end
4154 of reload. */
4155 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4156 insn), QImode);
4157 if (modified[i] != RELOAD_READ)
4158 emit_insn_after (gen_clobber (operand), insn);
4159 }
4160 }
4161 }
4162 else if (goal_alternative_matches[i] >= 0
4163 && goal_alternative_win[goal_alternative_matches[i]]
4164 && modified[i] == RELOAD_READ
4165 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4166 && ! no_input_reloads && ! no_output_reloads
4167 && optimize)
4168 {
4169 /* Similarly, make an optional reload for a pair of matching
4170 objects that are in MEM or a pseudo that didn't get a hard reg. */
4171
4172 rtx operand = recog_data.operand[i];
4173
4174 while (GET_CODE (operand) == SUBREG)
4175 operand = SUBREG_REG (operand);
4176 if ((MEM_P (operand)
4177 || (REG_P (operand)
4178 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4179 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4180 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4181 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4182 recog_data.operand[i],
4183 recog_data.operand_loc[goal_alternative_matches[i]],
4184 recog_data.operand_loc[i],
4185 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4186 operand_mode[goal_alternative_matches[i]],
4187 operand_mode[i],
4188 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4189 }
4190
4191 /* Perform whatever substitutions on the operands we are supposed
4192 to make due to commutativity or replacement of registers
4193 with equivalent constants or memory slots. */
4194
4195 for (i = 0; i < noperands; i++)
4196 {
4197 /* We only do this on the last pass through reload, because it is
4198 possible for some data (like reg_equiv_address) to be changed during
4199 later passes. Moreover, we lose the opportunity to get a useful
4200 reload_{in,out}_reg when we do these replacements. */
4201
4202 if (replace)
4203 {
4204 rtx substitution = substed_operand[i];
4205
4206 *recog_data.operand_loc[i] = substitution;
4207
4208 /* If we're replacing an operand with a LABEL_REF, we need to
4209 make sure that there's a REG_LABEL_OPERAND note attached to
4210 this instruction. */
4211 if (GET_CODE (substitution) == LABEL_REF
4212 && !find_reg_note (insn, REG_LABEL_OPERAND,
4213 LABEL_REF_LABEL (substitution))
4214 /* For a JUMP_P, if it was a branch target it must have
4215 already been recorded as such. */
4216 && (!JUMP_P (insn)
4217 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4218 insn)))
4219 {
4220 add_reg_note (insn, REG_LABEL_OPERAND,
4221 LABEL_REF_LABEL (substitution));
4222 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4223 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4224 }
4225
4226 }
4227 else
4228 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4229 }
4230
4231 /* If this insn pattern contains any MATCH_DUP's, make sure that
4232 they will be substituted if the operands they match are substituted.
4233 Also do now any substitutions we already did on the operands.
4234
4235 Don't do this if we aren't making replacements because we might be
4236 propagating things allocated by frame pointer elimination into places
4237 it doesn't expect. */
4238
4239 if (insn_code_number >= 0 && replace)
4240 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4241 {
4242 int opno = recog_data.dup_num[i];
4243 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4244 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4245 }
4246
4247 #if 0
4248 /* This loses because reloading of prior insns can invalidate the equivalence
4249 (or at least find_equiv_reg isn't smart enough to find it any more),
4250 causing this insn to need more reload regs than it needed before.
4251 It may be too late to make the reload regs available.
4252 Now this optimization is done safely in choose_reload_regs. */
4253
4254 /* For each reload of a reg into some other class of reg,
4255 search for an existing equivalent reg (same value now) in the right class.
4256 We can use it as long as we don't need to change its contents. */
4257 for (i = 0; i < n_reloads; i++)
4258 if (rld[i].reg_rtx == 0
4259 && rld[i].in != 0
4260 && REG_P (rld[i].in)
4261 && rld[i].out == 0)
4262 {
4263 rld[i].reg_rtx
4264 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4265 static_reload_reg_p, 0, rld[i].inmode);
4266 /* Prevent generation of insn to load the value
4267 because the one we found already has the value. */
4268 if (rld[i].reg_rtx)
4269 rld[i].in = rld[i].reg_rtx;
4270 }
4271 #endif
4272
4273 /* If we detected error and replaced asm instruction by USE, forget about the
4274 reloads. */
4275 if (GET_CODE (PATTERN (insn)) == USE
4276 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4277 n_reloads = 0;
4278
4279 /* Perhaps an output reload can be combined with another
4280 to reduce needs by one. */
4281 if (!goal_earlyclobber)
4282 combine_reloads ();
4283
4284 /* If we have a pair of reloads for parts of an address, they are reloading
4285 the same object, the operands themselves were not reloaded, and they
4286 are for two operands that are supposed to match, merge the reloads and
4287 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4288
4289 for (i = 0; i < n_reloads; i++)
4290 {
4291 int k;
4292
4293 for (j = i + 1; j < n_reloads; j++)
4294 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4295 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4296 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4297 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4298 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4299 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4300 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4301 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4302 && rtx_equal_p (rld[i].in, rld[j].in)
4303 && (operand_reloadnum[rld[i].opnum] < 0
4304 || rld[operand_reloadnum[rld[i].opnum]].optional)
4305 && (operand_reloadnum[rld[j].opnum] < 0
4306 || rld[operand_reloadnum[rld[j].opnum]].optional)
4307 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4308 || (goal_alternative_matches[rld[j].opnum]
4309 == rld[i].opnum)))
4310 {
4311 for (k = 0; k < n_replacements; k++)
4312 if (replacements[k].what == j)
4313 replacements[k].what = i;
4314
4315 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4316 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4317 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4318 else
4319 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4320 rld[j].in = 0;
4321 }
4322 }
4323
4324 /* Scan all the reloads and update their type.
4325 If a reload is for the address of an operand and we didn't reload
4326 that operand, change the type. Similarly, change the operand number
4327 of a reload when two operands match. If a reload is optional, treat it
4328 as though the operand isn't reloaded.
4329
4330 ??? This latter case is somewhat odd because if we do the optional
4331 reload, it means the object is hanging around. Thus we need only
4332 do the address reload if the optional reload was NOT done.
4333
4334 Change secondary reloads to be the address type of their operand, not
4335 the normal type.
4336
4337 If an operand's reload is now RELOAD_OTHER, change any
4338 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4339 RELOAD_FOR_OTHER_ADDRESS. */
4340
4341 for (i = 0; i < n_reloads; i++)
4342 {
4343 if (rld[i].secondary_p
4344 && rld[i].when_needed == operand_type[rld[i].opnum])
4345 rld[i].when_needed = address_type[rld[i].opnum];
4346
4347 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4348 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4350 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4351 && (operand_reloadnum[rld[i].opnum] < 0
4352 || rld[operand_reloadnum[rld[i].opnum]].optional))
4353 {
4354 /* If we have a secondary reload to go along with this reload,
4355 change its type to RELOAD_FOR_OPADDR_ADDR. */
4356
4357 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4358 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4359 && rld[i].secondary_in_reload != -1)
4360 {
4361 int secondary_in_reload = rld[i].secondary_in_reload;
4362
4363 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4364
4365 /* If there's a tertiary reload we have to change it also. */
4366 if (secondary_in_reload > 0
4367 && rld[secondary_in_reload].secondary_in_reload != -1)
4368 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4369 = RELOAD_FOR_OPADDR_ADDR;
4370 }
4371
4372 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4373 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4374 && rld[i].secondary_out_reload != -1)
4375 {
4376 int secondary_out_reload = rld[i].secondary_out_reload;
4377
4378 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4379
4380 /* If there's a tertiary reload we have to change it also. */
4381 if (secondary_out_reload
4382 && rld[secondary_out_reload].secondary_out_reload != -1)
4383 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4384 = RELOAD_FOR_OPADDR_ADDR;
4385 }
4386
4387 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4388 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4389 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4390 else
4391 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4392 }
4393
4394 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4395 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4396 && operand_reloadnum[rld[i].opnum] >= 0
4397 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4398 == RELOAD_OTHER))
4399 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4400
4401 if (goal_alternative_matches[rld[i].opnum] >= 0)
4402 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4403 }
4404
4405 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4406 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4407 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4408
4409 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4410 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4411 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4412 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4413 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4414 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4415 This is complicated by the fact that a single operand can have more
4416 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4417 choose_reload_regs without affecting code quality, and cases that
4418 actually fail are extremely rare, so it turns out to be better to fix
4419 the problem here by not generating cases that choose_reload_regs will
4420 fail for. */
4421 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4422 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4423 a single operand.
4424 We can reduce the register pressure by exploiting that a
4425 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4426 does not conflict with any of them, if it is only used for the first of
4427 the RELOAD_FOR_X_ADDRESS reloads. */
4428 {
4429 int first_op_addr_num = -2;
4430 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4431 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4432 int need_change = 0;
4433 /* We use last_op_addr_reload and the contents of the above arrays
4434 first as flags - -2 means no instance encountered, -1 means exactly
4435 one instance encountered.
4436 If more than one instance has been encountered, we store the reload
4437 number of the first reload of the kind in question; reload numbers
4438 are known to be non-negative. */
4439 for (i = 0; i < noperands; i++)
4440 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4441 for (i = n_reloads - 1; i >= 0; i--)
4442 {
4443 switch (rld[i].when_needed)
4444 {
4445 case RELOAD_FOR_OPERAND_ADDRESS:
4446 if (++first_op_addr_num >= 0)
4447 {
4448 first_op_addr_num = i;
4449 need_change = 1;
4450 }
4451 break;
4452 case RELOAD_FOR_INPUT_ADDRESS:
4453 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4454 {
4455 first_inpaddr_num[rld[i].opnum] = i;
4456 need_change = 1;
4457 }
4458 break;
4459 case RELOAD_FOR_OUTPUT_ADDRESS:
4460 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4461 {
4462 first_outpaddr_num[rld[i].opnum] = i;
4463 need_change = 1;
4464 }
4465 break;
4466 default:
4467 break;
4468 }
4469 }
4470
4471 if (need_change)
4472 {
4473 for (i = 0; i < n_reloads; i++)
4474 {
4475 int first_num;
4476 enum reload_type type;
4477
4478 switch (rld[i].when_needed)
4479 {
4480 case RELOAD_FOR_OPADDR_ADDR:
4481 first_num = first_op_addr_num;
4482 type = RELOAD_FOR_OPERAND_ADDRESS;
4483 break;
4484 case RELOAD_FOR_INPADDR_ADDRESS:
4485 first_num = first_inpaddr_num[rld[i].opnum];
4486 type = RELOAD_FOR_INPUT_ADDRESS;
4487 break;
4488 case RELOAD_FOR_OUTADDR_ADDRESS:
4489 first_num = first_outpaddr_num[rld[i].opnum];
4490 type = RELOAD_FOR_OUTPUT_ADDRESS;
4491 break;
4492 default:
4493 continue;
4494 }
4495 if (first_num < 0)
4496 continue;
4497 else if (i > first_num)
4498 rld[i].when_needed = type;
4499 else
4500 {
4501 /* Check if the only TYPE reload that uses reload I is
4502 reload FIRST_NUM. */
4503 for (j = n_reloads - 1; j > first_num; j--)
4504 {
4505 if (rld[j].when_needed == type
4506 && (rld[i].secondary_p
4507 ? rld[j].secondary_in_reload == i
4508 : reg_mentioned_p (rld[i].in, rld[j].in)))
4509 {
4510 rld[i].when_needed = type;
4511 break;
4512 }
4513 }
4514 }
4515 }
4516 }
4517 }
4518
4519 /* See if we have any reloads that are now allowed to be merged
4520 because we've changed when the reload is needed to
4521 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4522 check for the most common cases. */
4523
4524 for (i = 0; i < n_reloads; i++)
4525 if (rld[i].in != 0 && rld[i].out == 0
4526 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4527 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4528 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4529 for (j = 0; j < n_reloads; j++)
4530 if (i != j && rld[j].in != 0 && rld[j].out == 0
4531 && rld[j].when_needed == rld[i].when_needed
4532 && MATCHES (rld[i].in, rld[j].in)
4533 && rld[i].rclass == rld[j].rclass
4534 && !rld[i].nocombine && !rld[j].nocombine
4535 && rld[i].reg_rtx == rld[j].reg_rtx)
4536 {
4537 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4538 transfer_replacements (i, j);
4539 rld[j].in = 0;
4540 }
4541
4542 /* If we made any reloads for addresses, see if they violate a
4543 "no input reloads" requirement for this insn. But loads that we
4544 do after the insn (such as for output addresses) are fine. */
4545 if (HAVE_cc0 && no_input_reloads)
4546 for (i = 0; i < n_reloads; i++)
4547 gcc_assert (rld[i].in == 0
4548 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4549 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4550
4551 /* Compute reload_mode and reload_nregs. */
4552 for (i = 0; i < n_reloads; i++)
4553 {
4554 rld[i].mode
4555 = (rld[i].inmode == VOIDmode
4556 || (GET_MODE_SIZE (rld[i].outmode)
4557 > GET_MODE_SIZE (rld[i].inmode)))
4558 ? rld[i].outmode : rld[i].inmode;
4559
4560 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4561 }
4562
4563 /* Special case a simple move with an input reload and a
4564 destination of a hard reg, if the hard reg is ok, use it. */
4565 for (i = 0; i < n_reloads; i++)
4566 if (rld[i].when_needed == RELOAD_FOR_INPUT
4567 && GET_CODE (PATTERN (insn)) == SET
4568 && REG_P (SET_DEST (PATTERN (insn)))
4569 && (SET_SRC (PATTERN (insn)) == rld[i].in
4570 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4571 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4572 {
4573 rtx dest = SET_DEST (PATTERN (insn));
4574 unsigned int regno = REGNO (dest);
4575
4576 if (regno < FIRST_PSEUDO_REGISTER
4577 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4578 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4579 {
4580 int nr = hard_regno_nregs[regno][rld[i].mode];
4581 int ok = 1, nri;
4582
4583 for (nri = 1; nri < nr; nri ++)
4584 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4585 {
4586 ok = 0;
4587 break;
4588 }
4589
4590 if (ok)
4591 rld[i].reg_rtx = dest;
4592 }
4593 }
4594
4595 return retval;
4596 }
4597
4598 /* Return true if alternative number ALTNUM in constraint-string
4599 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4600 MEM gives the reference if it didn't need any reloads, otherwise it
4601 is null. */
4602
4603 static bool
4604 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4605 const char *constraint, int altnum)
4606 {
4607 int c;
4608
4609 /* Skip alternatives before the one requested. */
4610 while (altnum > 0)
4611 {
4612 while (*constraint++ != ',')
4613 ;
4614 altnum--;
4615 }
4616 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4617 If one of them is present, this alternative accepts the result of
4618 passing a constant-pool reference through find_reloads_toplev.
4619
4620 The same is true of extra memory constraints if the address
4621 was reloaded into a register. However, the target may elect
4622 to disallow the original constant address, forcing it to be
4623 reloaded into a register instead. */
4624 for (; (c = *constraint) && c != ',' && c != '#';
4625 constraint += CONSTRAINT_LEN (c, constraint))
4626 {
4627 enum constraint_num cn = lookup_constraint (constraint);
4628 if (insn_extra_memory_constraint (cn)
4629 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4630 return true;
4631 }
4632 return false;
4633 }
4634 \f
4635 /* Scan X for memory references and scan the addresses for reloading.
4636 Also checks for references to "constant" regs that we want to eliminate
4637 and replaces them with the values they stand for.
4638 We may alter X destructively if it contains a reference to such.
4639 If X is just a constant reg, we return the equivalent value
4640 instead of X.
4641
4642 IND_LEVELS says how many levels of indirect addressing this machine
4643 supports.
4644
4645 OPNUM and TYPE identify the purpose of the reload.
4646
4647 IS_SET_DEST is true if X is the destination of a SET, which is not
4648 appropriate to be replaced by a constant.
4649
4650 INSN, if nonzero, is the insn in which we do the reload. It is used
4651 to determine if we may generate output reloads, and where to put USEs
4652 for pseudos that we have to replace with stack slots.
4653
4654 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4655 result of find_reloads_address. */
4656
4657 static rtx
4658 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4659 int ind_levels, int is_set_dest, rtx_insn *insn,
4660 int *address_reloaded)
4661 {
4662 RTX_CODE code = GET_CODE (x);
4663
4664 const char *fmt = GET_RTX_FORMAT (code);
4665 int i;
4666 int copied;
4667
4668 if (code == REG)
4669 {
4670 /* This code is duplicated for speed in find_reloads. */
4671 int regno = REGNO (x);
4672 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4673 x = reg_equiv_constant (regno);
4674 #if 0
4675 /* This creates (subreg (mem...)) which would cause an unnecessary
4676 reload of the mem. */
4677 else if (reg_equiv_mem (regno) != 0)
4678 x = reg_equiv_mem (regno);
4679 #endif
4680 else if (reg_equiv_memory_loc (regno)
4681 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4682 {
4683 rtx mem = make_memloc (x, regno);
4684 if (reg_equiv_address (regno)
4685 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4686 {
4687 /* If this is not a toplevel operand, find_reloads doesn't see
4688 this substitution. We have to emit a USE of the pseudo so
4689 that delete_output_reload can see it. */
4690 if (replace_reloads && recog_data.operand[opnum] != x)
4691 /* We mark the USE with QImode so that we recognize it
4692 as one that can be safely deleted at the end of
4693 reload. */
4694 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4695 QImode);
4696 x = mem;
4697 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4698 opnum, type, ind_levels, insn);
4699 if (!rtx_equal_p (x, mem))
4700 push_reg_equiv_alt_mem (regno, x);
4701 if (address_reloaded)
4702 *address_reloaded = i;
4703 }
4704 }
4705 return x;
4706 }
4707 if (code == MEM)
4708 {
4709 rtx tem = x;
4710
4711 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4712 opnum, type, ind_levels, insn);
4713 if (address_reloaded)
4714 *address_reloaded = i;
4715
4716 return tem;
4717 }
4718
4719 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4720 {
4721 /* Check for SUBREG containing a REG that's equivalent to a
4722 constant. If the constant has a known value, truncate it
4723 right now. Similarly if we are extracting a single-word of a
4724 multi-word constant. If the constant is symbolic, allow it
4725 to be substituted normally. push_reload will strip the
4726 subreg later. The constant must not be VOIDmode, because we
4727 will lose the mode of the register (this should never happen
4728 because one of the cases above should handle it). */
4729
4730 int regno = REGNO (SUBREG_REG (x));
4731 rtx tem;
4732
4733 if (regno >= FIRST_PSEUDO_REGISTER
4734 && reg_renumber[regno] < 0
4735 && reg_equiv_constant (regno) != 0)
4736 {
4737 tem =
4738 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4739 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4740 gcc_assert (tem);
4741 if (CONSTANT_P (tem)
4742 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4743 {
4744 tem = force_const_mem (GET_MODE (x), tem);
4745 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4746 &XEXP (tem, 0), opnum, type,
4747 ind_levels, insn);
4748 if (address_reloaded)
4749 *address_reloaded = i;
4750 }
4751 return tem;
4752 }
4753
4754 /* If the subreg contains a reg that will be converted to a mem,
4755 attempt to convert the whole subreg to a (narrower or wider)
4756 memory reference instead. If this succeeds, we're done --
4757 otherwise fall through to check whether the inner reg still
4758 needs address reloads anyway. */
4759
4760 if (regno >= FIRST_PSEUDO_REGISTER
4761 && reg_equiv_memory_loc (regno) != 0)
4762 {
4763 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4764 insn, address_reloaded);
4765 if (tem)
4766 return tem;
4767 }
4768 }
4769
4770 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4771 {
4772 if (fmt[i] == 'e')
4773 {
4774 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4775 ind_levels, is_set_dest, insn,
4776 address_reloaded);
4777 /* If we have replaced a reg with it's equivalent memory loc -
4778 that can still be handled here e.g. if it's in a paradoxical
4779 subreg - we must make the change in a copy, rather than using
4780 a destructive change. This way, find_reloads can still elect
4781 not to do the change. */
4782 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4783 {
4784 x = shallow_copy_rtx (x);
4785 copied = 1;
4786 }
4787 XEXP (x, i) = new_part;
4788 }
4789 }
4790 return x;
4791 }
4792
4793 /* Return a mem ref for the memory equivalent of reg REGNO.
4794 This mem ref is not shared with anything. */
4795
4796 static rtx
4797 make_memloc (rtx ad, int regno)
4798 {
4799 /* We must rerun eliminate_regs, in case the elimination
4800 offsets have changed. */
4801 rtx tem
4802 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4803 0);
4804
4805 /* If TEM might contain a pseudo, we must copy it to avoid
4806 modifying it when we do the substitution for the reload. */
4807 if (rtx_varies_p (tem, 0))
4808 tem = copy_rtx (tem);
4809
4810 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4811 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4812
4813 /* Copy the result if it's still the same as the equivalence, to avoid
4814 modifying it when we do the substitution for the reload. */
4815 if (tem == reg_equiv_memory_loc (regno))
4816 tem = copy_rtx (tem);
4817 return tem;
4818 }
4819
4820 /* Returns true if AD could be turned into a valid memory reference
4821 to mode MODE in address space AS by reloading the part pointed to
4822 by PART into a register. */
4823
4824 static int
4825 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4826 addr_space_t as, rtx *part)
4827 {
4828 int retv;
4829 rtx tem = *part;
4830 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4831
4832 *part = reg;
4833 retv = memory_address_addr_space_p (mode, ad, as);
4834 *part = tem;
4835
4836 return retv;
4837 }
4838
4839 /* Record all reloads needed for handling memory address AD
4840 which appears in *LOC in a memory reference to mode MODE
4841 which itself is found in location *MEMREFLOC.
4842 Note that we take shortcuts assuming that no multi-reg machine mode
4843 occurs as part of an address.
4844
4845 OPNUM and TYPE specify the purpose of this reload.
4846
4847 IND_LEVELS says how many levels of indirect addressing this machine
4848 supports.
4849
4850 INSN, if nonzero, is the insn in which we do the reload. It is used
4851 to determine if we may generate output reloads, and where to put USEs
4852 for pseudos that we have to replace with stack slots.
4853
4854 Value is one if this address is reloaded or replaced as a whole; it is
4855 zero if the top level of this address was not reloaded or replaced, and
4856 it is -1 if it may or may not have been reloaded or replaced.
4857
4858 Note that there is no verification that the address will be valid after
4859 this routine does its work. Instead, we rely on the fact that the address
4860 was valid when reload started. So we need only undo things that reload
4861 could have broken. These are wrong register types, pseudos not allocated
4862 to a hard register, and frame pointer elimination. */
4863
4864 static int
4865 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4866 rtx *loc, int opnum, enum reload_type type,
4867 int ind_levels, rtx_insn *insn)
4868 {
4869 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4870 : ADDR_SPACE_GENERIC;
4871 int regno;
4872 int removed_and = 0;
4873 int op_index;
4874 rtx tem;
4875
4876 /* If the address is a register, see if it is a legitimate address and
4877 reload if not. We first handle the cases where we need not reload
4878 or where we must reload in a non-standard way. */
4879
4880 if (REG_P (ad))
4881 {
4882 regno = REGNO (ad);
4883
4884 if (reg_equiv_constant (regno) != 0)
4885 {
4886 find_reloads_address_part (reg_equiv_constant (regno), loc,
4887 base_reg_class (mode, as, MEM, SCRATCH),
4888 GET_MODE (ad), opnum, type, ind_levels);
4889 return 1;
4890 }
4891
4892 tem = reg_equiv_memory_loc (regno);
4893 if (tem != 0)
4894 {
4895 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4896 {
4897 tem = make_memloc (ad, regno);
4898 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4899 XEXP (tem, 0),
4900 MEM_ADDR_SPACE (tem)))
4901 {
4902 rtx orig = tem;
4903
4904 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4905 &XEXP (tem, 0), opnum,
4906 ADDR_TYPE (type), ind_levels, insn);
4907 if (!rtx_equal_p (tem, orig))
4908 push_reg_equiv_alt_mem (regno, tem);
4909 }
4910 /* We can avoid a reload if the register's equivalent memory
4911 expression is valid as an indirect memory address.
4912 But not all addresses are valid in a mem used as an indirect
4913 address: only reg or reg+constant. */
4914
4915 if (ind_levels > 0
4916 && strict_memory_address_addr_space_p (mode, tem, as)
4917 && (REG_P (XEXP (tem, 0))
4918 || (GET_CODE (XEXP (tem, 0)) == PLUS
4919 && REG_P (XEXP (XEXP (tem, 0), 0))
4920 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4921 {
4922 /* TEM is not the same as what we'll be replacing the
4923 pseudo with after reload, put a USE in front of INSN
4924 in the final reload pass. */
4925 if (replace_reloads
4926 && num_not_at_initial_offset
4927 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4928 {
4929 *loc = tem;
4930 /* We mark the USE with QImode so that we
4931 recognize it as one that can be safely
4932 deleted at the end of reload. */
4933 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4934 insn), QImode);
4935
4936 /* This doesn't really count as replacing the address
4937 as a whole, since it is still a memory access. */
4938 }
4939 return 0;
4940 }
4941 ad = tem;
4942 }
4943 }
4944
4945 /* The only remaining case where we can avoid a reload is if this is a
4946 hard register that is valid as a base register and which is not the
4947 subject of a CLOBBER in this insn. */
4948
4949 else if (regno < FIRST_PSEUDO_REGISTER
4950 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4951 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4952 return 0;
4953
4954 /* If we do not have one of the cases above, we must do the reload. */
4955 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4956 base_reg_class (mode, as, MEM, SCRATCH),
4957 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4958 return 1;
4959 }
4960
4961 if (strict_memory_address_addr_space_p (mode, ad, as))
4962 {
4963 /* The address appears valid, so reloads are not needed.
4964 But the address may contain an eliminable register.
4965 This can happen because a machine with indirect addressing
4966 may consider a pseudo register by itself a valid address even when
4967 it has failed to get a hard reg.
4968 So do a tree-walk to find and eliminate all such regs. */
4969
4970 /* But first quickly dispose of a common case. */
4971 if (GET_CODE (ad) == PLUS
4972 && CONST_INT_P (XEXP (ad, 1))
4973 && REG_P (XEXP (ad, 0))
4974 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4975 return 0;
4976
4977 subst_reg_equivs_changed = 0;
4978 *loc = subst_reg_equivs (ad, insn);
4979
4980 if (! subst_reg_equivs_changed)
4981 return 0;
4982
4983 /* Check result for validity after substitution. */
4984 if (strict_memory_address_addr_space_p (mode, ad, as))
4985 return 0;
4986 }
4987
4988 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4989 do
4990 {
4991 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4992 {
4993 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4994 ind_levels, win);
4995 }
4996 break;
4997 win:
4998 *memrefloc = copy_rtx (*memrefloc);
4999 XEXP (*memrefloc, 0) = ad;
5000 move_replacements (&ad, &XEXP (*memrefloc, 0));
5001 return -1;
5002 }
5003 while (0);
5004 #endif
5005
5006 /* The address is not valid. We have to figure out why. First see if
5007 we have an outer AND and remove it if so. Then analyze what's inside. */
5008
5009 if (GET_CODE (ad) == AND)
5010 {
5011 removed_and = 1;
5012 loc = &XEXP (ad, 0);
5013 ad = *loc;
5014 }
5015
5016 /* One possibility for why the address is invalid is that it is itself
5017 a MEM. This can happen when the frame pointer is being eliminated, a
5018 pseudo is not allocated to a hard register, and the offset between the
5019 frame and stack pointers is not its initial value. In that case the
5020 pseudo will have been replaced by a MEM referring to the
5021 stack pointer. */
5022 if (MEM_P (ad))
5023 {
5024 /* First ensure that the address in this MEM is valid. Then, unless
5025 indirect addresses are valid, reload the MEM into a register. */
5026 tem = ad;
5027 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5028 opnum, ADDR_TYPE (type),
5029 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5030
5031 /* If tem was changed, then we must create a new memory reference to
5032 hold it and store it back into memrefloc. */
5033 if (tem != ad && memrefloc)
5034 {
5035 *memrefloc = copy_rtx (*memrefloc);
5036 copy_replacements (tem, XEXP (*memrefloc, 0));
5037 loc = &XEXP (*memrefloc, 0);
5038 if (removed_and)
5039 loc = &XEXP (*loc, 0);
5040 }
5041
5042 /* Check similar cases as for indirect addresses as above except
5043 that we can allow pseudos and a MEM since they should have been
5044 taken care of above. */
5045
5046 if (ind_levels == 0
5047 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5048 || MEM_P (XEXP (tem, 0))
5049 || ! (REG_P (XEXP (tem, 0))
5050 || (GET_CODE (XEXP (tem, 0)) == PLUS
5051 && REG_P (XEXP (XEXP (tem, 0), 0))
5052 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5053 {
5054 /* Must use TEM here, not AD, since it is the one that will
5055 have any subexpressions reloaded, if needed. */
5056 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5057 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5058 VOIDmode, 0,
5059 0, opnum, type);
5060 return ! removed_and;
5061 }
5062 else
5063 return 0;
5064 }
5065
5066 /* If we have address of a stack slot but it's not valid because the
5067 displacement is too large, compute the sum in a register.
5068 Handle all base registers here, not just fp/ap/sp, because on some
5069 targets (namely SH) we can also get too large displacements from
5070 big-endian corrections. */
5071 else if (GET_CODE (ad) == PLUS
5072 && REG_P (XEXP (ad, 0))
5073 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5074 && CONST_INT_P (XEXP (ad, 1))
5075 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5076 CONST_INT)
5077 /* Similarly, if we were to reload the base register and the
5078 mem+offset address is still invalid, then we want to reload
5079 the whole address, not just the base register. */
5080 || ! maybe_memory_address_addr_space_p
5081 (mode, ad, as, &(XEXP (ad, 0)))))
5082
5083 {
5084 /* Unshare the MEM rtx so we can safely alter it. */
5085 if (memrefloc)
5086 {
5087 *memrefloc = copy_rtx (*memrefloc);
5088 loc = &XEXP (*memrefloc, 0);
5089 if (removed_and)
5090 loc = &XEXP (*loc, 0);
5091 }
5092
5093 if (double_reg_address_ok
5094 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5095 PLUS, CONST_INT))
5096 {
5097 /* Unshare the sum as well. */
5098 *loc = ad = copy_rtx (ad);
5099
5100 /* Reload the displacement into an index reg.
5101 We assume the frame pointer or arg pointer is a base reg. */
5102 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5103 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5104 type, ind_levels);
5105 return 0;
5106 }
5107 else
5108 {
5109 /* If the sum of two regs is not necessarily valid,
5110 reload the sum into a base reg.
5111 That will at least work. */
5112 find_reloads_address_part (ad, loc,
5113 base_reg_class (mode, as, MEM, SCRATCH),
5114 GET_MODE (ad), opnum, type, ind_levels);
5115 }
5116 return ! removed_and;
5117 }
5118
5119 /* If we have an indexed stack slot, there are three possible reasons why
5120 it might be invalid: The index might need to be reloaded, the address
5121 might have been made by frame pointer elimination and hence have a
5122 constant out of range, or both reasons might apply.
5123
5124 We can easily check for an index needing reload, but even if that is the
5125 case, we might also have an invalid constant. To avoid making the
5126 conservative assumption and requiring two reloads, we see if this address
5127 is valid when not interpreted strictly. If it is, the only problem is
5128 that the index needs a reload and find_reloads_address_1 will take care
5129 of it.
5130
5131 Handle all base registers here, not just fp/ap/sp, because on some
5132 targets (namely SPARC) we can also get invalid addresses from preventive
5133 subreg big-endian corrections made by find_reloads_toplev. We
5134 can also get expressions involving LO_SUM (rather than PLUS) from
5135 find_reloads_subreg_address.
5136
5137 If we decide to do something, it must be that `double_reg_address_ok'
5138 is true. We generate a reload of the base register + constant and
5139 rework the sum so that the reload register will be added to the index.
5140 This is safe because we know the address isn't shared.
5141
5142 We check for the base register as both the first and second operand of
5143 the innermost PLUS and/or LO_SUM. */
5144
5145 for (op_index = 0; op_index < 2; ++op_index)
5146 {
5147 rtx operand, addend;
5148 enum rtx_code inner_code;
5149
5150 if (GET_CODE (ad) != PLUS)
5151 continue;
5152
5153 inner_code = GET_CODE (XEXP (ad, 0));
5154 if (!(GET_CODE (ad) == PLUS
5155 && CONST_INT_P (XEXP (ad, 1))
5156 && (inner_code == PLUS || inner_code == LO_SUM)))
5157 continue;
5158
5159 operand = XEXP (XEXP (ad, 0), op_index);
5160 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5161 continue;
5162
5163 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5164
5165 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5166 GET_CODE (addend))
5167 || operand == frame_pointer_rtx
5168 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5169 && operand == hard_frame_pointer_rtx)
5170 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5171 && operand == arg_pointer_rtx)
5172 || operand == stack_pointer_rtx)
5173 && ! maybe_memory_address_addr_space_p
5174 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5175 {
5176 rtx offset_reg;
5177 enum reg_class cls;
5178
5179 offset_reg = plus_constant (GET_MODE (ad), operand,
5180 INTVAL (XEXP (ad, 1)));
5181
5182 /* Form the adjusted address. */
5183 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5184 ad = gen_rtx_PLUS (GET_MODE (ad),
5185 op_index == 0 ? offset_reg : addend,
5186 op_index == 0 ? addend : offset_reg);
5187 else
5188 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5189 op_index == 0 ? offset_reg : addend,
5190 op_index == 0 ? addend : offset_reg);
5191 *loc = ad;
5192
5193 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5194 find_reloads_address_part (XEXP (ad, op_index),
5195 &XEXP (ad, op_index), cls,
5196 GET_MODE (ad), opnum, type, ind_levels);
5197 find_reloads_address_1 (mode, as,
5198 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5199 GET_CODE (XEXP (ad, op_index)),
5200 &XEXP (ad, 1 - op_index), opnum,
5201 type, 0, insn);
5202
5203 return 0;
5204 }
5205 }
5206
5207 /* See if address becomes valid when an eliminable register
5208 in a sum is replaced. */
5209
5210 tem = ad;
5211 if (GET_CODE (ad) == PLUS)
5212 tem = subst_indexed_address (ad);
5213 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5214 {
5215 /* Ok, we win that way. Replace any additional eliminable
5216 registers. */
5217
5218 subst_reg_equivs_changed = 0;
5219 tem = subst_reg_equivs (tem, insn);
5220
5221 /* Make sure that didn't make the address invalid again. */
5222
5223 if (! subst_reg_equivs_changed
5224 || strict_memory_address_addr_space_p (mode, tem, as))
5225 {
5226 *loc = tem;
5227 return 0;
5228 }
5229 }
5230
5231 /* If constants aren't valid addresses, reload the constant address
5232 into a register. */
5233 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5234 {
5235 machine_mode address_mode = GET_MODE (ad);
5236 if (address_mode == VOIDmode)
5237 address_mode = targetm.addr_space.address_mode (as);
5238
5239 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5240 Unshare it so we can safely alter it. */
5241 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5242 && CONSTANT_POOL_ADDRESS_P (ad))
5243 {
5244 *memrefloc = copy_rtx (*memrefloc);
5245 loc = &XEXP (*memrefloc, 0);
5246 if (removed_and)
5247 loc = &XEXP (*loc, 0);
5248 }
5249
5250 find_reloads_address_part (ad, loc,
5251 base_reg_class (mode, as, MEM, SCRATCH),
5252 address_mode, opnum, type, ind_levels);
5253 return ! removed_and;
5254 }
5255
5256 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5257 opnum, type, ind_levels, insn);
5258 }
5259 \f
5260 /* Find all pseudo regs appearing in AD
5261 that are eliminable in favor of equivalent values
5262 and do not have hard regs; replace them by their equivalents.
5263 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5264 front of it for pseudos that we have to replace with stack slots. */
5265
5266 static rtx
5267 subst_reg_equivs (rtx ad, rtx_insn *insn)
5268 {
5269 RTX_CODE code = GET_CODE (ad);
5270 int i;
5271 const char *fmt;
5272
5273 switch (code)
5274 {
5275 case HIGH:
5276 case CONST:
5277 CASE_CONST_ANY:
5278 case SYMBOL_REF:
5279 case LABEL_REF:
5280 case PC:
5281 case CC0:
5282 return ad;
5283
5284 case REG:
5285 {
5286 int regno = REGNO (ad);
5287
5288 if (reg_equiv_constant (regno) != 0)
5289 {
5290 subst_reg_equivs_changed = 1;
5291 return reg_equiv_constant (regno);
5292 }
5293 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5294 {
5295 rtx mem = make_memloc (ad, regno);
5296 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5297 {
5298 subst_reg_equivs_changed = 1;
5299 /* We mark the USE with QImode so that we recognize it
5300 as one that can be safely deleted at the end of
5301 reload. */
5302 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5303 QImode);
5304 return mem;
5305 }
5306 }
5307 }
5308 return ad;
5309
5310 case PLUS:
5311 /* Quickly dispose of a common case. */
5312 if (XEXP (ad, 0) == frame_pointer_rtx
5313 && CONST_INT_P (XEXP (ad, 1)))
5314 return ad;
5315 break;
5316
5317 default:
5318 break;
5319 }
5320
5321 fmt = GET_RTX_FORMAT (code);
5322 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5323 if (fmt[i] == 'e')
5324 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5325 return ad;
5326 }
5327 \f
5328 /* Compute the sum of X and Y, making canonicalizations assumed in an
5329 address, namely: sum constant integers, surround the sum of two
5330 constants with a CONST, put the constant as the second operand, and
5331 group the constant on the outermost sum.
5332
5333 This routine assumes both inputs are already in canonical form. */
5334
5335 rtx
5336 form_sum (machine_mode mode, rtx x, rtx y)
5337 {
5338 rtx tem;
5339
5340 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5341 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5342
5343 if (CONST_INT_P (x))
5344 return plus_constant (mode, y, INTVAL (x));
5345 else if (CONST_INT_P (y))
5346 return plus_constant (mode, x, INTVAL (y));
5347 else if (CONSTANT_P (x))
5348 tem = x, x = y, y = tem;
5349
5350 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5351 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5352
5353 /* Note that if the operands of Y are specified in the opposite
5354 order in the recursive calls below, infinite recursion will occur. */
5355 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5356 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5357
5358 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5359 constant will have been placed second. */
5360 if (CONSTANT_P (x) && CONSTANT_P (y))
5361 {
5362 if (GET_CODE (x) == CONST)
5363 x = XEXP (x, 0);
5364 if (GET_CODE (y) == CONST)
5365 y = XEXP (y, 0);
5366
5367 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5368 }
5369
5370 return gen_rtx_PLUS (mode, x, y);
5371 }
5372 \f
5373 /* If ADDR is a sum containing a pseudo register that should be
5374 replaced with a constant (from reg_equiv_constant),
5375 return the result of doing so, and also apply the associative
5376 law so that the result is more likely to be a valid address.
5377 (But it is not guaranteed to be one.)
5378
5379 Note that at most one register is replaced, even if more are
5380 replaceable. Also, we try to put the result into a canonical form
5381 so it is more likely to be a valid address.
5382
5383 In all other cases, return ADDR. */
5384
5385 static rtx
5386 subst_indexed_address (rtx addr)
5387 {
5388 rtx op0 = 0, op1 = 0, op2 = 0;
5389 rtx tem;
5390 int regno;
5391
5392 if (GET_CODE (addr) == PLUS)
5393 {
5394 /* Try to find a register to replace. */
5395 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5396 if (REG_P (op0)
5397 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5398 && reg_renumber[regno] < 0
5399 && reg_equiv_constant (regno) != 0)
5400 op0 = reg_equiv_constant (regno);
5401 else if (REG_P (op1)
5402 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5403 && reg_renumber[regno] < 0
5404 && reg_equiv_constant (regno) != 0)
5405 op1 = reg_equiv_constant (regno);
5406 else if (GET_CODE (op0) == PLUS
5407 && (tem = subst_indexed_address (op0)) != op0)
5408 op0 = tem;
5409 else if (GET_CODE (op1) == PLUS
5410 && (tem = subst_indexed_address (op1)) != op1)
5411 op1 = tem;
5412 else
5413 return addr;
5414
5415 /* Pick out up to three things to add. */
5416 if (GET_CODE (op1) == PLUS)
5417 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5418 else if (GET_CODE (op0) == PLUS)
5419 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5420
5421 /* Compute the sum. */
5422 if (op2 != 0)
5423 op1 = form_sum (GET_MODE (addr), op1, op2);
5424 if (op1 != 0)
5425 op0 = form_sum (GET_MODE (addr), op0, op1);
5426
5427 return op0;
5428 }
5429 return addr;
5430 }
5431 \f
5432 /* Update the REG_INC notes for an insn. It updates all REG_INC
5433 notes for the instruction which refer to REGNO the to refer
5434 to the reload number.
5435
5436 INSN is the insn for which any REG_INC notes need updating.
5437
5438 REGNO is the register number which has been reloaded.
5439
5440 RELOADNUM is the reload number. */
5441
5442 static void
5443 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5444 int reloadnum ATTRIBUTE_UNUSED)
5445 {
5446 if (!AUTO_INC_DEC)
5447 return;
5448
5449 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5450 if (REG_NOTE_KIND (link) == REG_INC
5451 && (int) REGNO (XEXP (link, 0)) == regno)
5452 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5453 }
5454 \f
5455 /* Record the pseudo registers we must reload into hard registers in a
5456 subexpression of a would-be memory address, X referring to a value
5457 in mode MODE. (This function is not called if the address we find
5458 is strictly valid.)
5459
5460 CONTEXT = 1 means we are considering regs as index regs,
5461 = 0 means we are considering them as base regs.
5462 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5463 or an autoinc code.
5464 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5465 is the code of the index part of the address. Otherwise, pass SCRATCH
5466 for this argument.
5467 OPNUM and TYPE specify the purpose of any reloads made.
5468
5469 IND_LEVELS says how many levels of indirect addressing are
5470 supported at this point in the address.
5471
5472 INSN, if nonzero, is the insn in which we do the reload. It is used
5473 to determine if we may generate output reloads.
5474
5475 We return nonzero if X, as a whole, is reloaded or replaced. */
5476
5477 /* Note that we take shortcuts assuming that no multi-reg machine mode
5478 occurs as part of an address.
5479 Also, this is not fully machine-customizable; it works for machines
5480 such as VAXen and 68000's and 32000's, but other possible machines
5481 could have addressing modes that this does not handle right.
5482 If you add push_reload calls here, you need to make sure gen_reload
5483 handles those cases gracefully. */
5484
5485 static int
5486 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5487 rtx x, int context,
5488 enum rtx_code outer_code, enum rtx_code index_code,
5489 rtx *loc, int opnum, enum reload_type type,
5490 int ind_levels, rtx_insn *insn)
5491 {
5492 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5493 ((CONTEXT) == 0 \
5494 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5495 : REGNO_OK_FOR_INDEX_P (REGNO))
5496
5497 enum reg_class context_reg_class;
5498 RTX_CODE code = GET_CODE (x);
5499 bool reloaded_inner_of_autoinc = false;
5500
5501 if (context == 1)
5502 context_reg_class = INDEX_REG_CLASS;
5503 else
5504 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5505
5506 switch (code)
5507 {
5508 case PLUS:
5509 {
5510 rtx orig_op0 = XEXP (x, 0);
5511 rtx orig_op1 = XEXP (x, 1);
5512 RTX_CODE code0 = GET_CODE (orig_op0);
5513 RTX_CODE code1 = GET_CODE (orig_op1);
5514 rtx op0 = orig_op0;
5515 rtx op1 = orig_op1;
5516
5517 if (GET_CODE (op0) == SUBREG)
5518 {
5519 op0 = SUBREG_REG (op0);
5520 code0 = GET_CODE (op0);
5521 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5522 op0 = gen_rtx_REG (word_mode,
5523 (REGNO (op0) +
5524 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5525 GET_MODE (SUBREG_REG (orig_op0)),
5526 SUBREG_BYTE (orig_op0),
5527 GET_MODE (orig_op0))));
5528 }
5529
5530 if (GET_CODE (op1) == SUBREG)
5531 {
5532 op1 = SUBREG_REG (op1);
5533 code1 = GET_CODE (op1);
5534 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5535 /* ??? Why is this given op1's mode and above for
5536 ??? op0 SUBREGs we use word_mode? */
5537 op1 = gen_rtx_REG (GET_MODE (op1),
5538 (REGNO (op1) +
5539 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5540 GET_MODE (SUBREG_REG (orig_op1)),
5541 SUBREG_BYTE (orig_op1),
5542 GET_MODE (orig_op1))));
5543 }
5544 /* Plus in the index register may be created only as a result of
5545 register rematerialization for expression like &localvar*4. Reload it.
5546 It may be possible to combine the displacement on the outer level,
5547 but it is probably not worthwhile to do so. */
5548 if (context == 1)
5549 {
5550 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5551 opnum, ADDR_TYPE (type), ind_levels, insn);
5552 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5553 context_reg_class,
5554 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5555 return 1;
5556 }
5557
5558 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5559 || code0 == ZERO_EXTEND || code1 == MEM)
5560 {
5561 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5562 &XEXP (x, 0), opnum, type, ind_levels,
5563 insn);
5564 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5565 &XEXP (x, 1), opnum, type, ind_levels,
5566 insn);
5567 }
5568
5569 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5570 || code1 == ZERO_EXTEND || code0 == MEM)
5571 {
5572 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5573 &XEXP (x, 0), opnum, type, ind_levels,
5574 insn);
5575 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5576 &XEXP (x, 1), opnum, type, ind_levels,
5577 insn);
5578 }
5579
5580 else if (code0 == CONST_INT || code0 == CONST
5581 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5582 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5583 &XEXP (x, 1), opnum, type, ind_levels,
5584 insn);
5585
5586 else if (code1 == CONST_INT || code1 == CONST
5587 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5588 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5589 &XEXP (x, 0), opnum, type, ind_levels,
5590 insn);
5591
5592 else if (code0 == REG && code1 == REG)
5593 {
5594 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5595 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5596 return 0;
5597 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5598 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5599 return 0;
5600 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5601 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5602 &XEXP (x, 1), opnum, type, ind_levels,
5603 insn);
5604 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5605 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5606 &XEXP (x, 0), opnum, type, ind_levels,
5607 insn);
5608 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5609 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5613 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5614 &XEXP (x, 1), opnum, type, ind_levels,
5615 insn);
5616 else
5617 {
5618 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5619 &XEXP (x, 0), opnum, type, ind_levels,
5620 insn);
5621 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5622 &XEXP (x, 1), opnum, type, ind_levels,
5623 insn);
5624 }
5625 }
5626
5627 else if (code0 == REG)
5628 {
5629 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5630 &XEXP (x, 0), opnum, type, ind_levels,
5631 insn);
5632 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5633 &XEXP (x, 1), opnum, type, ind_levels,
5634 insn);
5635 }
5636
5637 else if (code1 == REG)
5638 {
5639 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5640 &XEXP (x, 1), opnum, type, ind_levels,
5641 insn);
5642 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5643 &XEXP (x, 0), opnum, type, ind_levels,
5644 insn);
5645 }
5646 }
5647
5648 return 0;
5649
5650 case POST_MODIFY:
5651 case PRE_MODIFY:
5652 {
5653 rtx op0 = XEXP (x, 0);
5654 rtx op1 = XEXP (x, 1);
5655 enum rtx_code index_code;
5656 int regno;
5657 int reloadnum;
5658
5659 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5660 return 0;
5661
5662 /* Currently, we only support {PRE,POST}_MODIFY constructs
5663 where a base register is {inc,dec}remented by the contents
5664 of another register or by a constant value. Thus, these
5665 operands must match. */
5666 gcc_assert (op0 == XEXP (op1, 0));
5667
5668 /* Require index register (or constant). Let's just handle the
5669 register case in the meantime... If the target allows
5670 auto-modify by a constant then we could try replacing a pseudo
5671 register with its equivalent constant where applicable.
5672
5673 We also handle the case where the register was eliminated
5674 resulting in a PLUS subexpression.
5675
5676 If we later decide to reload the whole PRE_MODIFY or
5677 POST_MODIFY, inc_for_reload might clobber the reload register
5678 before reading the index. The index register might therefore
5679 need to live longer than a TYPE reload normally would, so be
5680 conservative and class it as RELOAD_OTHER. */
5681 if ((REG_P (XEXP (op1, 1))
5682 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5683 || GET_CODE (XEXP (op1, 1)) == PLUS)
5684 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5685 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5686 ind_levels, insn);
5687
5688 gcc_assert (REG_P (XEXP (op1, 0)));
5689
5690 regno = REGNO (XEXP (op1, 0));
5691 index_code = GET_CODE (XEXP (op1, 1));
5692
5693 /* A register that is incremented cannot be constant! */
5694 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5695 || reg_equiv_constant (regno) == 0);
5696
5697 /* Handle a register that is equivalent to a memory location
5698 which cannot be addressed directly. */
5699 if (reg_equiv_memory_loc (regno) != 0
5700 && (reg_equiv_address (regno) != 0
5701 || num_not_at_initial_offset))
5702 {
5703 rtx tem = make_memloc (XEXP (x, 0), regno);
5704
5705 if (reg_equiv_address (regno)
5706 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5707 {
5708 rtx orig = tem;
5709
5710 /* First reload the memory location's address.
5711 We can't use ADDR_TYPE (type) here, because we need to
5712 write back the value after reading it, hence we actually
5713 need two registers. */
5714 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5715 &XEXP (tem, 0), opnum,
5716 RELOAD_OTHER,
5717 ind_levels, insn);
5718
5719 if (!rtx_equal_p (tem, orig))
5720 push_reg_equiv_alt_mem (regno, tem);
5721
5722 /* Then reload the memory location into a base
5723 register. */
5724 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5725 &XEXP (op1, 0),
5726 base_reg_class (mode, as,
5727 code, index_code),
5728 GET_MODE (x), GET_MODE (x), 0,
5729 0, opnum, RELOAD_OTHER);
5730
5731 update_auto_inc_notes (this_insn, regno, reloadnum);
5732 return 0;
5733 }
5734 }
5735
5736 if (reg_renumber[regno] >= 0)
5737 regno = reg_renumber[regno];
5738
5739 /* We require a base register here... */
5740 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5741 {
5742 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5743 &XEXP (op1, 0), &XEXP (x, 0),
5744 base_reg_class (mode, as,
5745 code, index_code),
5746 GET_MODE (x), GET_MODE (x), 0, 0,
5747 opnum, RELOAD_OTHER);
5748
5749 update_auto_inc_notes (this_insn, regno, reloadnum);
5750 return 0;
5751 }
5752 }
5753 return 0;
5754
5755 case POST_INC:
5756 case POST_DEC:
5757 case PRE_INC:
5758 case PRE_DEC:
5759 if (REG_P (XEXP (x, 0)))
5760 {
5761 int regno = REGNO (XEXP (x, 0));
5762 int value = 0;
5763 rtx x_orig = x;
5764
5765 /* A register that is incremented cannot be constant! */
5766 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5767 || reg_equiv_constant (regno) == 0);
5768
5769 /* Handle a register that is equivalent to a memory location
5770 which cannot be addressed directly. */
5771 if (reg_equiv_memory_loc (regno) != 0
5772 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5773 {
5774 rtx tem = make_memloc (XEXP (x, 0), regno);
5775 if (reg_equiv_address (regno)
5776 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5777 {
5778 rtx orig = tem;
5779
5780 /* First reload the memory location's address.
5781 We can't use ADDR_TYPE (type) here, because we need to
5782 write back the value after reading it, hence we actually
5783 need two registers. */
5784 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5785 &XEXP (tem, 0), opnum, type,
5786 ind_levels, insn);
5787 reloaded_inner_of_autoinc = true;
5788 if (!rtx_equal_p (tem, orig))
5789 push_reg_equiv_alt_mem (regno, tem);
5790 /* Put this inside a new increment-expression. */
5791 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5792 /* Proceed to reload that, as if it contained a register. */
5793 }
5794 }
5795
5796 /* If we have a hard register that is ok in this incdec context,
5797 don't make a reload. If the register isn't nice enough for
5798 autoincdec, we can reload it. But, if an autoincrement of a
5799 register that we here verified as playing nice, still outside
5800 isn't "valid", it must be that no autoincrement is "valid".
5801 If that is true and something made an autoincrement anyway,
5802 this must be a special context where one is allowed.
5803 (For example, a "push" instruction.)
5804 We can't improve this address, so leave it alone. */
5805
5806 /* Otherwise, reload the autoincrement into a suitable hard reg
5807 and record how much to increment by. */
5808
5809 if (reg_renumber[regno] >= 0)
5810 regno = reg_renumber[regno];
5811 if (regno >= FIRST_PSEUDO_REGISTER
5812 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5813 index_code))
5814 {
5815 int reloadnum;
5816
5817 /* If we can output the register afterwards, do so, this
5818 saves the extra update.
5819 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5820 CALL_INSN - and it does not set CC0.
5821 But don't do this if we cannot directly address the
5822 memory location, since this will make it harder to
5823 reuse address reloads, and increases register pressure.
5824 Also don't do this if we can probably update x directly. */
5825 rtx equiv = (MEM_P (XEXP (x, 0))
5826 ? XEXP (x, 0)
5827 : reg_equiv_mem (regno));
5828 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5829 if (insn && NONJUMP_INSN_P (insn) && equiv
5830 && memory_operand (equiv, GET_MODE (equiv))
5831 #if HAVE_cc0
5832 && ! sets_cc0_p (PATTERN (insn))
5833 #endif
5834 && ! (icode != CODE_FOR_nothing
5835 && insn_operand_matches (icode, 0, equiv)
5836 && insn_operand_matches (icode, 1, equiv))
5837 /* Using RELOAD_OTHER means we emit this and the reload we
5838 made earlier in the wrong order. */
5839 && !reloaded_inner_of_autoinc)
5840 {
5841 /* We use the original pseudo for loc, so that
5842 emit_reload_insns() knows which pseudo this
5843 reload refers to and updates the pseudo rtx, not
5844 its equivalent memory location, as well as the
5845 corresponding entry in reg_last_reload_reg. */
5846 loc = &XEXP (x_orig, 0);
5847 x = XEXP (x, 0);
5848 reloadnum
5849 = push_reload (x, x, loc, loc,
5850 context_reg_class,
5851 GET_MODE (x), GET_MODE (x), 0, 0,
5852 opnum, RELOAD_OTHER);
5853 }
5854 else
5855 {
5856 reloadnum
5857 = push_reload (x, x, loc, (rtx*) 0,
5858 context_reg_class,
5859 GET_MODE (x), GET_MODE (x), 0, 0,
5860 opnum, type);
5861 rld[reloadnum].inc
5862 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5863
5864 value = 1;
5865 }
5866
5867 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5868 reloadnum);
5869 }
5870 return value;
5871 }
5872 return 0;
5873
5874 case TRUNCATE:
5875 case SIGN_EXTEND:
5876 case ZERO_EXTEND:
5877 /* Look for parts to reload in the inner expression and reload them
5878 too, in addition to this operation. Reloading all inner parts in
5879 addition to this one shouldn't be necessary, but at this point,
5880 we don't know if we can possibly omit any part that *can* be
5881 reloaded. Targets that are better off reloading just either part
5882 (or perhaps even a different part of an outer expression), should
5883 define LEGITIMIZE_RELOAD_ADDRESS. */
5884 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5885 context, code, SCRATCH, &XEXP (x, 0), opnum,
5886 type, ind_levels, insn);
5887 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5888 context_reg_class,
5889 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5890 return 1;
5891
5892 case MEM:
5893 /* This is probably the result of a substitution, by eliminate_regs, of
5894 an equivalent address for a pseudo that was not allocated to a hard
5895 register. Verify that the specified address is valid and reload it
5896 into a register.
5897
5898 Since we know we are going to reload this item, don't decrement for
5899 the indirection level.
5900
5901 Note that this is actually conservative: it would be slightly more
5902 efficient to use the value of SPILL_INDIRECT_LEVELS from
5903 reload1.c here. */
5904
5905 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5906 opnum, ADDR_TYPE (type), ind_levels, insn);
5907 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5908 context_reg_class,
5909 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5910 return 1;
5911
5912 case REG:
5913 {
5914 int regno = REGNO (x);
5915
5916 if (reg_equiv_constant (regno) != 0)
5917 {
5918 find_reloads_address_part (reg_equiv_constant (regno), loc,
5919 context_reg_class,
5920 GET_MODE (x), opnum, type, ind_levels);
5921 return 1;
5922 }
5923
5924 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5925 that feeds this insn. */
5926 if (reg_equiv_mem (regno) != 0)
5927 {
5928 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5929 context_reg_class,
5930 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5931 return 1;
5932 }
5933 #endif
5934
5935 if (reg_equiv_memory_loc (regno)
5936 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5937 {
5938 rtx tem = make_memloc (x, regno);
5939 if (reg_equiv_address (regno) != 0
5940 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5941 {
5942 x = tem;
5943 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5944 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5945 ind_levels, insn);
5946 if (!rtx_equal_p (x, tem))
5947 push_reg_equiv_alt_mem (regno, x);
5948 }
5949 }
5950
5951 if (reg_renumber[regno] >= 0)
5952 regno = reg_renumber[regno];
5953
5954 if (regno >= FIRST_PSEUDO_REGISTER
5955 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5956 index_code))
5957 {
5958 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5959 context_reg_class,
5960 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5961 return 1;
5962 }
5963
5964 /* If a register appearing in an address is the subject of a CLOBBER
5965 in this insn, reload it into some other register to be safe.
5966 The CLOBBER is supposed to make the register unavailable
5967 from before this insn to after it. */
5968 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5969 {
5970 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5971 context_reg_class,
5972 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5973 return 1;
5974 }
5975 }
5976 return 0;
5977
5978 case SUBREG:
5979 if (REG_P (SUBREG_REG (x)))
5980 {
5981 /* If this is a SUBREG of a hard register and the resulting register
5982 is of the wrong class, reload the whole SUBREG. This avoids
5983 needless copies if SUBREG_REG is multi-word. */
5984 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5985 {
5986 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5987
5988 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5989 index_code))
5990 {
5991 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5992 context_reg_class,
5993 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5994 return 1;
5995 }
5996 }
5997 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5998 is larger than the class size, then reload the whole SUBREG. */
5999 else
6000 {
6001 enum reg_class rclass = context_reg_class;
6002 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6003 > reg_class_size[(int) rclass])
6004 {
6005 /* If the inner register will be replaced by a memory
6006 reference, we can do this only if we can replace the
6007 whole subreg by a (narrower) memory reference. If
6008 this is not possible, fall through and reload just
6009 the inner register (including address reloads). */
6010 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6011 {
6012 rtx tem = find_reloads_subreg_address (x, opnum,
6013 ADDR_TYPE (type),
6014 ind_levels, insn,
6015 NULL);
6016 if (tem)
6017 {
6018 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6019 GET_MODE (tem), VOIDmode, 0, 0,
6020 opnum, type);
6021 return 1;
6022 }
6023 }
6024 else
6025 {
6026 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6027 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6028 return 1;
6029 }
6030 }
6031 }
6032 }
6033 break;
6034
6035 default:
6036 break;
6037 }
6038
6039 {
6040 const char *fmt = GET_RTX_FORMAT (code);
6041 int i;
6042
6043 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6044 {
6045 if (fmt[i] == 'e')
6046 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6047 we get here. */
6048 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6049 code, SCRATCH, &XEXP (x, i),
6050 opnum, type, ind_levels, insn);
6051 }
6052 }
6053
6054 #undef REG_OK_FOR_CONTEXT
6055 return 0;
6056 }
6057 \f
6058 /* X, which is found at *LOC, is a part of an address that needs to be
6059 reloaded into a register of class RCLASS. If X is a constant, or if
6060 X is a PLUS that contains a constant, check that the constant is a
6061 legitimate operand and that we are supposed to be able to load
6062 it into the register.
6063
6064 If not, force the constant into memory and reload the MEM instead.
6065
6066 MODE is the mode to use, in case X is an integer constant.
6067
6068 OPNUM and TYPE describe the purpose of any reloads made.
6069
6070 IND_LEVELS says how many levels of indirect addressing this machine
6071 supports. */
6072
6073 static void
6074 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6075 machine_mode mode, int opnum,
6076 enum reload_type type, int ind_levels)
6077 {
6078 if (CONSTANT_P (x)
6079 && (!targetm.legitimate_constant_p (mode, x)
6080 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6081 {
6082 x = force_const_mem (mode, x);
6083 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6084 opnum, type, ind_levels, 0);
6085 }
6086
6087 else if (GET_CODE (x) == PLUS
6088 && CONSTANT_P (XEXP (x, 1))
6089 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6090 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6091 == NO_REGS))
6092 {
6093 rtx tem;
6094
6095 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6096 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6097 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6098 opnum, type, ind_levels, 0);
6099 }
6100
6101 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6102 mode, VOIDmode, 0, 0, opnum, type);
6103 }
6104 \f
6105 /* X, a subreg of a pseudo, is a part of an address that needs to be
6106 reloaded, and the pseusdo is equivalent to a memory location.
6107
6108 Attempt to replace the whole subreg by a (possibly narrower or wider)
6109 memory reference. If this is possible, return this new memory
6110 reference, and push all required address reloads. Otherwise,
6111 return NULL.
6112
6113 OPNUM and TYPE identify the purpose of the reload.
6114
6115 IND_LEVELS says how many levels of indirect addressing are
6116 supported at this point in the address.
6117
6118 INSN, if nonzero, is the insn in which we do the reload. It is used
6119 to determine where to put USEs for pseudos that we have to replace with
6120 stack slots. */
6121
6122 static rtx
6123 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6124 int ind_levels, rtx_insn *insn,
6125 int *address_reloaded)
6126 {
6127 machine_mode outer_mode = GET_MODE (x);
6128 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6129 int regno = REGNO (SUBREG_REG (x));
6130 int reloaded = 0;
6131 rtx tem, orig;
6132 int offset;
6133
6134 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6135
6136 /* We cannot replace the subreg with a modified memory reference if:
6137
6138 - we have a paradoxical subreg that implicitly acts as a zero or
6139 sign extension operation due to LOAD_EXTEND_OP;
6140
6141 - we have a subreg that is implicitly supposed to act on the full
6142 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6143
6144 - the address of the equivalent memory location is mode-dependent; or
6145
6146 - we have a paradoxical subreg and the resulting memory is not
6147 sufficiently aligned to allow access in the wider mode.
6148
6149 In addition, we choose not to perform the replacement for *any*
6150 paradoxical subreg, even if it were possible in principle. This
6151 is to avoid generating wider memory references than necessary.
6152
6153 This corresponds to how previous versions of reload used to handle
6154 paradoxical subregs where no address reload was required. */
6155
6156 if (paradoxical_subreg_p (x))
6157 return NULL;
6158
6159 if (WORD_REGISTER_OPERATIONS
6160 && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6161 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6162 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6163 return NULL;
6164
6165 /* Since we don't attempt to handle paradoxical subregs, we can just
6166 call into simplify_subreg, which will handle all remaining checks
6167 for us. */
6168 orig = make_memloc (SUBREG_REG (x), regno);
6169 offset = SUBREG_BYTE (x);
6170 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6171 if (!tem || !MEM_P (tem))
6172 return NULL;
6173
6174 /* Now push all required address reloads, if any. */
6175 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6176 XEXP (tem, 0), &XEXP (tem, 0),
6177 opnum, type, ind_levels, insn);
6178 /* ??? Do we need to handle nonzero offsets somehow? */
6179 if (!offset && !rtx_equal_p (tem, orig))
6180 push_reg_equiv_alt_mem (regno, tem);
6181
6182 /* For some processors an address may be valid in the original mode but
6183 not in a smaller mode. For example, ARM accepts a scaled index register
6184 in SImode but not in HImode. Note that this is only a problem if the
6185 address in reg_equiv_mem is already invalid in the new mode; other
6186 cases would be fixed by find_reloads_address as usual.
6187
6188 ??? We attempt to handle such cases here by doing an additional reload
6189 of the full address after the usual processing by find_reloads_address.
6190 Note that this may not work in the general case, but it seems to cover
6191 the cases where this situation currently occurs. A more general fix
6192 might be to reload the *value* instead of the address, but this would
6193 not be expected by the callers of this routine as-is.
6194
6195 If find_reloads_address already completed replaced the address, there
6196 is nothing further to do. */
6197 if (reloaded == 0
6198 && reg_equiv_mem (regno) != 0
6199 && !strict_memory_address_addr_space_p
6200 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6201 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6202 {
6203 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6204 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6205 MEM, SCRATCH),
6206 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6207 reloaded = 1;
6208 }
6209
6210 /* If this is not a toplevel operand, find_reloads doesn't see this
6211 substitution. We have to emit a USE of the pseudo so that
6212 delete_output_reload can see it. */
6213 if (replace_reloads && recog_data.operand[opnum] != x)
6214 /* We mark the USE with QImode so that we recognize it as one that
6215 can be safely deleted at the end of reload. */
6216 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6217 QImode);
6218
6219 if (address_reloaded)
6220 *address_reloaded = reloaded;
6221
6222 return tem;
6223 }
6224 \f
6225 /* Substitute into the current INSN the registers into which we have reloaded
6226 the things that need reloading. The array `replacements'
6227 contains the locations of all pointers that must be changed
6228 and says what to replace them with.
6229
6230 Return the rtx that X translates into; usually X, but modified. */
6231
6232 void
6233 subst_reloads (rtx_insn *insn)
6234 {
6235 int i;
6236
6237 for (i = 0; i < n_replacements; i++)
6238 {
6239 struct replacement *r = &replacements[i];
6240 rtx reloadreg = rld[r->what].reg_rtx;
6241 if (reloadreg)
6242 {
6243 #ifdef DEBUG_RELOAD
6244 /* This checking takes a very long time on some platforms
6245 causing the gcc.c-torture/compile/limits-fnargs.c test
6246 to time out during testing. See PR 31850.
6247
6248 Internal consistency test. Check that we don't modify
6249 anything in the equivalence arrays. Whenever something from
6250 those arrays needs to be reloaded, it must be unshared before
6251 being substituted into; the equivalence must not be modified.
6252 Otherwise, if the equivalence is used after that, it will
6253 have been modified, and the thing substituted (probably a
6254 register) is likely overwritten and not a usable equivalence. */
6255 int check_regno;
6256
6257 for (check_regno = 0; check_regno < max_regno; check_regno++)
6258 {
6259 #define CHECK_MODF(ARRAY) \
6260 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6261 || !loc_mentioned_in_p (r->where, \
6262 (*reg_equivs)[check_regno].ARRAY))
6263
6264 CHECK_MODF (constant);
6265 CHECK_MODF (memory_loc);
6266 CHECK_MODF (address);
6267 CHECK_MODF (mem);
6268 #undef CHECK_MODF
6269 }
6270 #endif /* DEBUG_RELOAD */
6271
6272 /* If we're replacing a LABEL_REF with a register, there must
6273 already be an indication (to e.g. flow) which label this
6274 register refers to. */
6275 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6276 || !JUMP_P (insn)
6277 || find_reg_note (insn,
6278 REG_LABEL_OPERAND,
6279 XEXP (*r->where, 0))
6280 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6281
6282 /* Encapsulate RELOADREG so its machine mode matches what
6283 used to be there. Note that gen_lowpart_common will
6284 do the wrong thing if RELOADREG is multi-word. RELOADREG
6285 will always be a REG here. */
6286 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6287 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6288
6289 *r->where = reloadreg;
6290 }
6291 /* If reload got no reg and isn't optional, something's wrong. */
6292 else
6293 gcc_assert (rld[r->what].optional);
6294 }
6295 }
6296 \f
6297 /* Make a copy of any replacements being done into X and move those
6298 copies to locations in Y, a copy of X. */
6299
6300 void
6301 copy_replacements (rtx x, rtx y)
6302 {
6303 copy_replacements_1 (&x, &y, n_replacements);
6304 }
6305
6306 static void
6307 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6308 {
6309 int i, j;
6310 rtx x, y;
6311 struct replacement *r;
6312 enum rtx_code code;
6313 const char *fmt;
6314
6315 for (j = 0; j < orig_replacements; j++)
6316 if (replacements[j].where == px)
6317 {
6318 r = &replacements[n_replacements++];
6319 r->where = py;
6320 r->what = replacements[j].what;
6321 r->mode = replacements[j].mode;
6322 }
6323
6324 x = *px;
6325 y = *py;
6326 code = GET_CODE (x);
6327 fmt = GET_RTX_FORMAT (code);
6328
6329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6330 {
6331 if (fmt[i] == 'e')
6332 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6333 else if (fmt[i] == 'E')
6334 for (j = XVECLEN (x, i); --j >= 0; )
6335 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6336 orig_replacements);
6337 }
6338 }
6339
6340 /* Change any replacements being done to *X to be done to *Y. */
6341
6342 void
6343 move_replacements (rtx *x, rtx *y)
6344 {
6345 int i;
6346
6347 for (i = 0; i < n_replacements; i++)
6348 if (replacements[i].where == x)
6349 replacements[i].where = y;
6350 }
6351 \f
6352 /* If LOC was scheduled to be replaced by something, return the replacement.
6353 Otherwise, return *LOC. */
6354
6355 rtx
6356 find_replacement (rtx *loc)
6357 {
6358 struct replacement *r;
6359
6360 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6361 {
6362 rtx reloadreg = rld[r->what].reg_rtx;
6363
6364 if (reloadreg && r->where == loc)
6365 {
6366 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6367 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6368
6369 return reloadreg;
6370 }
6371 else if (reloadreg && GET_CODE (*loc) == SUBREG
6372 && r->where == &SUBREG_REG (*loc))
6373 {
6374 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6375 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6376
6377 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6378 GET_MODE (SUBREG_REG (*loc)),
6379 SUBREG_BYTE (*loc));
6380 }
6381 }
6382
6383 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6384 what's inside and make a new rtl if so. */
6385 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6386 || GET_CODE (*loc) == MULT)
6387 {
6388 rtx x = find_replacement (&XEXP (*loc, 0));
6389 rtx y = find_replacement (&XEXP (*loc, 1));
6390
6391 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6392 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6393 }
6394
6395 return *loc;
6396 }
6397 \f
6398 /* Return nonzero if register in range [REGNO, ENDREGNO)
6399 appears either explicitly or implicitly in X
6400 other than being stored into (except for earlyclobber operands).
6401
6402 References contained within the substructure at LOC do not count.
6403 LOC may be zero, meaning don't ignore anything.
6404
6405 This is similar to refers_to_regno_p in rtlanal.c except that we
6406 look at equivalences for pseudos that didn't get hard registers. */
6407
6408 static int
6409 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6410 rtx x, rtx *loc)
6411 {
6412 int i;
6413 unsigned int r;
6414 RTX_CODE code;
6415 const char *fmt;
6416
6417 if (x == 0)
6418 return 0;
6419
6420 repeat:
6421 code = GET_CODE (x);
6422
6423 switch (code)
6424 {
6425 case REG:
6426 r = REGNO (x);
6427
6428 /* If this is a pseudo, a hard register must not have been allocated.
6429 X must therefore either be a constant or be in memory. */
6430 if (r >= FIRST_PSEUDO_REGISTER)
6431 {
6432 if (reg_equiv_memory_loc (r))
6433 return refers_to_regno_for_reload_p (regno, endregno,
6434 reg_equiv_memory_loc (r),
6435 (rtx*) 0);
6436
6437 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6438 return 0;
6439 }
6440
6441 return (endregno > r
6442 && regno < r + (r < FIRST_PSEUDO_REGISTER
6443 ? hard_regno_nregs[r][GET_MODE (x)]
6444 : 1));
6445
6446 case SUBREG:
6447 /* If this is a SUBREG of a hard reg, we can see exactly which
6448 registers are being modified. Otherwise, handle normally. */
6449 if (REG_P (SUBREG_REG (x))
6450 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6451 {
6452 unsigned int inner_regno = subreg_regno (x);
6453 unsigned int inner_endregno
6454 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6455 ? subreg_nregs (x) : 1);
6456
6457 return endregno > inner_regno && regno < inner_endregno;
6458 }
6459 break;
6460
6461 case CLOBBER:
6462 case SET:
6463 if (&SET_DEST (x) != loc
6464 /* Note setting a SUBREG counts as referring to the REG it is in for
6465 a pseudo but not for hard registers since we can
6466 treat each word individually. */
6467 && ((GET_CODE (SET_DEST (x)) == SUBREG
6468 && loc != &SUBREG_REG (SET_DEST (x))
6469 && REG_P (SUBREG_REG (SET_DEST (x)))
6470 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6471 && refers_to_regno_for_reload_p (regno, endregno,
6472 SUBREG_REG (SET_DEST (x)),
6473 loc))
6474 /* If the output is an earlyclobber operand, this is
6475 a conflict. */
6476 || ((!REG_P (SET_DEST (x))
6477 || earlyclobber_operand_p (SET_DEST (x)))
6478 && refers_to_regno_for_reload_p (regno, endregno,
6479 SET_DEST (x), loc))))
6480 return 1;
6481
6482 if (code == CLOBBER || loc == &SET_SRC (x))
6483 return 0;
6484 x = SET_SRC (x);
6485 goto repeat;
6486
6487 default:
6488 break;
6489 }
6490
6491 /* X does not match, so try its subexpressions. */
6492
6493 fmt = GET_RTX_FORMAT (code);
6494 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6495 {
6496 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6497 {
6498 if (i == 0)
6499 {
6500 x = XEXP (x, 0);
6501 goto repeat;
6502 }
6503 else
6504 if (refers_to_regno_for_reload_p (regno, endregno,
6505 XEXP (x, i), loc))
6506 return 1;
6507 }
6508 else if (fmt[i] == 'E')
6509 {
6510 int j;
6511 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6512 if (loc != &XVECEXP (x, i, j)
6513 && refers_to_regno_for_reload_p (regno, endregno,
6514 XVECEXP (x, i, j), loc))
6515 return 1;
6516 }
6517 }
6518 return 0;
6519 }
6520
6521 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6522 we check if any register number in X conflicts with the relevant register
6523 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6524 contains a MEM (we don't bother checking for memory addresses that can't
6525 conflict because we expect this to be a rare case.
6526
6527 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6528 that we look at equivalences for pseudos that didn't get hard registers. */
6529
6530 int
6531 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6532 {
6533 int regno, endregno;
6534
6535 /* Overly conservative. */
6536 if (GET_CODE (x) == STRICT_LOW_PART
6537 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6538 x = XEXP (x, 0);
6539
6540 /* If either argument is a constant, then modifying X can not affect IN. */
6541 if (CONSTANT_P (x) || CONSTANT_P (in))
6542 return 0;
6543 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6544 return refers_to_mem_for_reload_p (in);
6545 else if (GET_CODE (x) == SUBREG)
6546 {
6547 regno = REGNO (SUBREG_REG (x));
6548 if (regno < FIRST_PSEUDO_REGISTER)
6549 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6550 GET_MODE (SUBREG_REG (x)),
6551 SUBREG_BYTE (x),
6552 GET_MODE (x));
6553 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6554 ? subreg_nregs (x) : 1);
6555
6556 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6557 }
6558 else if (REG_P (x))
6559 {
6560 regno = REGNO (x);
6561
6562 /* If this is a pseudo, it must not have been assigned a hard register.
6563 Therefore, it must either be in memory or be a constant. */
6564
6565 if (regno >= FIRST_PSEUDO_REGISTER)
6566 {
6567 if (reg_equiv_memory_loc (regno))
6568 return refers_to_mem_for_reload_p (in);
6569 gcc_assert (reg_equiv_constant (regno));
6570 return 0;
6571 }
6572
6573 endregno = END_REGNO (x);
6574
6575 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6576 }
6577 else if (MEM_P (x))
6578 return refers_to_mem_for_reload_p (in);
6579 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6580 || GET_CODE (x) == CC0)
6581 return reg_mentioned_p (x, in);
6582 else
6583 {
6584 gcc_assert (GET_CODE (x) == PLUS);
6585
6586 /* We actually want to know if X is mentioned somewhere inside IN.
6587 We must not say that (plus (sp) (const_int 124)) is in
6588 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6589 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6590 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6591 while (MEM_P (in))
6592 in = XEXP (in, 0);
6593 if (REG_P (in))
6594 return 0;
6595 else if (GET_CODE (in) == PLUS)
6596 return (rtx_equal_p (x, in)
6597 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6598 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6599 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6600 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6601 }
6602
6603 gcc_unreachable ();
6604 }
6605
6606 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6607 registers. */
6608
6609 static int
6610 refers_to_mem_for_reload_p (rtx x)
6611 {
6612 const char *fmt;
6613 int i;
6614
6615 if (MEM_P (x))
6616 return 1;
6617
6618 if (REG_P (x))
6619 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6620 && reg_equiv_memory_loc (REGNO (x)));
6621
6622 fmt = GET_RTX_FORMAT (GET_CODE (x));
6623 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6624 if (fmt[i] == 'e'
6625 && (MEM_P (XEXP (x, i))
6626 || refers_to_mem_for_reload_p (XEXP (x, i))))
6627 return 1;
6628
6629 return 0;
6630 }
6631 \f
6632 /* Check the insns before INSN to see if there is a suitable register
6633 containing the same value as GOAL.
6634 If OTHER is -1, look for a register in class RCLASS.
6635 Otherwise, just see if register number OTHER shares GOAL's value.
6636
6637 Return an rtx for the register found, or zero if none is found.
6638
6639 If RELOAD_REG_P is (short *)1,
6640 we reject any hard reg that appears in reload_reg_rtx
6641 because such a hard reg is also needed coming into this insn.
6642
6643 If RELOAD_REG_P is any other nonzero value,
6644 it is a vector indexed by hard reg number
6645 and we reject any hard reg whose element in the vector is nonnegative
6646 as well as any that appears in reload_reg_rtx.
6647
6648 If GOAL is zero, then GOALREG is a register number; we look
6649 for an equivalent for that register.
6650
6651 MODE is the machine mode of the value we want an equivalence for.
6652 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6653
6654 This function is used by jump.c as well as in the reload pass.
6655
6656 If GOAL is the sum of the stack pointer and a constant, we treat it
6657 as if it were a constant except that sp is required to be unchanging. */
6658
6659 rtx
6660 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6661 short *reload_reg_p, int goalreg, machine_mode mode)
6662 {
6663 rtx_insn *p = insn;
6664 rtx goaltry, valtry, value;
6665 rtx_insn *where;
6666 rtx pat;
6667 int regno = -1;
6668 int valueno;
6669 int goal_mem = 0;
6670 int goal_const = 0;
6671 int goal_mem_addr_varies = 0;
6672 int need_stable_sp = 0;
6673 int nregs;
6674 int valuenregs;
6675 int num = 0;
6676
6677 if (goal == 0)
6678 regno = goalreg;
6679 else if (REG_P (goal))
6680 regno = REGNO (goal);
6681 else if (MEM_P (goal))
6682 {
6683 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6684 if (MEM_VOLATILE_P (goal))
6685 return 0;
6686 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6687 return 0;
6688 /* An address with side effects must be reexecuted. */
6689 switch (code)
6690 {
6691 case POST_INC:
6692 case PRE_INC:
6693 case POST_DEC:
6694 case PRE_DEC:
6695 case POST_MODIFY:
6696 case PRE_MODIFY:
6697 return 0;
6698 default:
6699 break;
6700 }
6701 goal_mem = 1;
6702 }
6703 else if (CONSTANT_P (goal))
6704 goal_const = 1;
6705 else if (GET_CODE (goal) == PLUS
6706 && XEXP (goal, 0) == stack_pointer_rtx
6707 && CONSTANT_P (XEXP (goal, 1)))
6708 goal_const = need_stable_sp = 1;
6709 else if (GET_CODE (goal) == PLUS
6710 && XEXP (goal, 0) == frame_pointer_rtx
6711 && CONSTANT_P (XEXP (goal, 1)))
6712 goal_const = 1;
6713 else
6714 return 0;
6715
6716 num = 0;
6717 /* Scan insns back from INSN, looking for one that copies
6718 a value into or out of GOAL.
6719 Stop and give up if we reach a label. */
6720
6721 while (1)
6722 {
6723 p = PREV_INSN (p);
6724 if (p && DEBUG_INSN_P (p))
6725 continue;
6726 num++;
6727 if (p == 0 || LABEL_P (p)
6728 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6729 return 0;
6730
6731 /* Don't reuse register contents from before a setjmp-type
6732 function call; on the second return (from the longjmp) it
6733 might have been clobbered by a later reuse. It doesn't
6734 seem worthwhile to actually go and see if it is actually
6735 reused even if that information would be readily available;
6736 just don't reuse it across the setjmp call. */
6737 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6738 return 0;
6739
6740 if (NONJUMP_INSN_P (p)
6741 /* If we don't want spill regs ... */
6742 && (! (reload_reg_p != 0
6743 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6744 /* ... then ignore insns introduced by reload; they aren't
6745 useful and can cause results in reload_as_needed to be
6746 different from what they were when calculating the need for
6747 spills. If we notice an input-reload insn here, we will
6748 reject it below, but it might hide a usable equivalent.
6749 That makes bad code. It may even fail: perhaps no reg was
6750 spilled for this insn because it was assumed we would find
6751 that equivalent. */
6752 || INSN_UID (p) < reload_first_uid))
6753 {
6754 rtx tem;
6755 pat = single_set (p);
6756
6757 /* First check for something that sets some reg equal to GOAL. */
6758 if (pat != 0
6759 && ((regno >= 0
6760 && true_regnum (SET_SRC (pat)) == regno
6761 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6762 ||
6763 (regno >= 0
6764 && true_regnum (SET_DEST (pat)) == regno
6765 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6766 ||
6767 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6768 /* When looking for stack pointer + const,
6769 make sure we don't use a stack adjust. */
6770 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6771 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6772 || (goal_mem
6773 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6774 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6775 || (goal_mem
6776 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6777 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6778 /* If we are looking for a constant,
6779 and something equivalent to that constant was copied
6780 into a reg, we can use that reg. */
6781 || (goal_const && REG_NOTES (p) != 0
6782 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6783 && ((rtx_equal_p (XEXP (tem, 0), goal)
6784 && (valueno
6785 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6786 || (REG_P (SET_DEST (pat))
6787 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6788 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6789 && CONST_INT_P (goal)
6790 && 0 != (goaltry
6791 = operand_subword (XEXP (tem, 0), 0, 0,
6792 VOIDmode))
6793 && rtx_equal_p (goal, goaltry)
6794 && (valtry
6795 = operand_subword (SET_DEST (pat), 0, 0,
6796 VOIDmode))
6797 && (valueno = true_regnum (valtry)) >= 0)))
6798 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6799 NULL_RTX))
6800 && REG_P (SET_DEST (pat))
6801 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6802 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6803 && CONST_INT_P (goal)
6804 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6805 VOIDmode))
6806 && rtx_equal_p (goal, goaltry)
6807 && (valtry
6808 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6809 && (valueno = true_regnum (valtry)) >= 0)))
6810 {
6811 if (other >= 0)
6812 {
6813 if (valueno != other)
6814 continue;
6815 }
6816 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6817 continue;
6818 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6819 mode, valueno))
6820 continue;
6821 value = valtry;
6822 where = p;
6823 break;
6824 }
6825 }
6826 }
6827
6828 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6829 (or copying VALUE into GOAL, if GOAL is also a register).
6830 Now verify that VALUE is really valid. */
6831
6832 /* VALUENO is the register number of VALUE; a hard register. */
6833
6834 /* Don't try to re-use something that is killed in this insn. We want
6835 to be able to trust REG_UNUSED notes. */
6836 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6837 return 0;
6838
6839 /* If we propose to get the value from the stack pointer or if GOAL is
6840 a MEM based on the stack pointer, we need a stable SP. */
6841 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6842 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6843 goal)))
6844 need_stable_sp = 1;
6845
6846 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6847 if (GET_MODE (value) != mode)
6848 return 0;
6849
6850 /* Reject VALUE if it was loaded from GOAL
6851 and is also a register that appears in the address of GOAL. */
6852
6853 if (goal_mem && value == SET_DEST (single_set (where))
6854 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6855 goal, (rtx*) 0))
6856 return 0;
6857
6858 /* Reject registers that overlap GOAL. */
6859
6860 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6861 nregs = hard_regno_nregs[regno][mode];
6862 else
6863 nregs = 1;
6864 valuenregs = hard_regno_nregs[valueno][mode];
6865
6866 if (!goal_mem && !goal_const
6867 && regno + nregs > valueno && regno < valueno + valuenregs)
6868 return 0;
6869
6870 /* Reject VALUE if it is one of the regs reserved for reloads.
6871 Reload1 knows how to reuse them anyway, and it would get
6872 confused if we allocated one without its knowledge.
6873 (Now that insns introduced by reload are ignored above,
6874 this case shouldn't happen, but I'm not positive.) */
6875
6876 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6877 {
6878 int i;
6879 for (i = 0; i < valuenregs; ++i)
6880 if (reload_reg_p[valueno + i] >= 0)
6881 return 0;
6882 }
6883
6884 /* Reject VALUE if it is a register being used for an input reload
6885 even if it is not one of those reserved. */
6886
6887 if (reload_reg_p != 0)
6888 {
6889 int i;
6890 for (i = 0; i < n_reloads; i++)
6891 if (rld[i].reg_rtx != 0 && rld[i].in)
6892 {
6893 int regno1 = REGNO (rld[i].reg_rtx);
6894 int nregs1 = hard_regno_nregs[regno1]
6895 [GET_MODE (rld[i].reg_rtx)];
6896 if (regno1 < valueno + valuenregs
6897 && regno1 + nregs1 > valueno)
6898 return 0;
6899 }
6900 }
6901
6902 if (goal_mem)
6903 /* We must treat frame pointer as varying here,
6904 since it can vary--in a nonlocal goto as generated by expand_goto. */
6905 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6906
6907 /* Now verify that the values of GOAL and VALUE remain unaltered
6908 until INSN is reached. */
6909
6910 p = insn;
6911 while (1)
6912 {
6913 p = PREV_INSN (p);
6914 if (p == where)
6915 return value;
6916
6917 /* Don't trust the conversion past a function call
6918 if either of the two is in a call-clobbered register, or memory. */
6919 if (CALL_P (p))
6920 {
6921 int i;
6922
6923 if (goal_mem || need_stable_sp)
6924 return 0;
6925
6926 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6927 for (i = 0; i < nregs; ++i)
6928 if (call_used_regs[regno + i]
6929 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6930 return 0;
6931
6932 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6933 for (i = 0; i < valuenregs; ++i)
6934 if (call_used_regs[valueno + i]
6935 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6936 return 0;
6937 }
6938
6939 if (INSN_P (p))
6940 {
6941 pat = PATTERN (p);
6942
6943 /* Watch out for unspec_volatile, and volatile asms. */
6944 if (volatile_insn_p (pat))
6945 return 0;
6946
6947 /* If this insn P stores in either GOAL or VALUE, return 0.
6948 If GOAL is a memory ref and this insn writes memory, return 0.
6949 If GOAL is a memory ref and its address is not constant,
6950 and this insn P changes a register used in GOAL, return 0. */
6951
6952 if (GET_CODE (pat) == COND_EXEC)
6953 pat = COND_EXEC_CODE (pat);
6954 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6955 {
6956 rtx dest = SET_DEST (pat);
6957 while (GET_CODE (dest) == SUBREG
6958 || GET_CODE (dest) == ZERO_EXTRACT
6959 || GET_CODE (dest) == STRICT_LOW_PART)
6960 dest = XEXP (dest, 0);
6961 if (REG_P (dest))
6962 {
6963 int xregno = REGNO (dest);
6964 int xnregs;
6965 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6966 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6967 else
6968 xnregs = 1;
6969 if (xregno < regno + nregs && xregno + xnregs > regno)
6970 return 0;
6971 if (xregno < valueno + valuenregs
6972 && xregno + xnregs > valueno)
6973 return 0;
6974 if (goal_mem_addr_varies
6975 && reg_overlap_mentioned_for_reload_p (dest, goal))
6976 return 0;
6977 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6978 return 0;
6979 }
6980 else if (goal_mem && MEM_P (dest)
6981 && ! push_operand (dest, GET_MODE (dest)))
6982 return 0;
6983 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6984 && reg_equiv_memory_loc (regno) != 0)
6985 return 0;
6986 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6987 return 0;
6988 }
6989 else if (GET_CODE (pat) == PARALLEL)
6990 {
6991 int i;
6992 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6993 {
6994 rtx v1 = XVECEXP (pat, 0, i);
6995 if (GET_CODE (v1) == COND_EXEC)
6996 v1 = COND_EXEC_CODE (v1);
6997 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6998 {
6999 rtx dest = SET_DEST (v1);
7000 while (GET_CODE (dest) == SUBREG
7001 || GET_CODE (dest) == ZERO_EXTRACT
7002 || GET_CODE (dest) == STRICT_LOW_PART)
7003 dest = XEXP (dest, 0);
7004 if (REG_P (dest))
7005 {
7006 int xregno = REGNO (dest);
7007 int xnregs;
7008 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7009 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7010 else
7011 xnregs = 1;
7012 if (xregno < regno + nregs
7013 && xregno + xnregs > regno)
7014 return 0;
7015 if (xregno < valueno + valuenregs
7016 && xregno + xnregs > valueno)
7017 return 0;
7018 if (goal_mem_addr_varies
7019 && reg_overlap_mentioned_for_reload_p (dest,
7020 goal))
7021 return 0;
7022 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7023 return 0;
7024 }
7025 else if (goal_mem && MEM_P (dest)
7026 && ! push_operand (dest, GET_MODE (dest)))
7027 return 0;
7028 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7029 && reg_equiv_memory_loc (regno) != 0)
7030 return 0;
7031 else if (need_stable_sp
7032 && push_operand (dest, GET_MODE (dest)))
7033 return 0;
7034 }
7035 }
7036 }
7037
7038 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7039 {
7040 rtx link;
7041
7042 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7043 link = XEXP (link, 1))
7044 {
7045 pat = XEXP (link, 0);
7046 if (GET_CODE (pat) == CLOBBER)
7047 {
7048 rtx dest = SET_DEST (pat);
7049
7050 if (REG_P (dest))
7051 {
7052 int xregno = REGNO (dest);
7053 int xnregs
7054 = hard_regno_nregs[xregno][GET_MODE (dest)];
7055
7056 if (xregno < regno + nregs
7057 && xregno + xnregs > regno)
7058 return 0;
7059 else if (xregno < valueno + valuenregs
7060 && xregno + xnregs > valueno)
7061 return 0;
7062 else if (goal_mem_addr_varies
7063 && reg_overlap_mentioned_for_reload_p (dest,
7064 goal))
7065 return 0;
7066 }
7067
7068 else if (goal_mem && MEM_P (dest)
7069 && ! push_operand (dest, GET_MODE (dest)))
7070 return 0;
7071 else if (need_stable_sp
7072 && push_operand (dest, GET_MODE (dest)))
7073 return 0;
7074 }
7075 }
7076 }
7077
7078 #if AUTO_INC_DEC
7079 /* If this insn auto-increments or auto-decrements
7080 either regno or valueno, return 0 now.
7081 If GOAL is a memory ref and its address is not constant,
7082 and this insn P increments a register used in GOAL, return 0. */
7083 {
7084 rtx link;
7085
7086 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7087 if (REG_NOTE_KIND (link) == REG_INC
7088 && REG_P (XEXP (link, 0)))
7089 {
7090 int incno = REGNO (XEXP (link, 0));
7091 if (incno < regno + nregs && incno >= regno)
7092 return 0;
7093 if (incno < valueno + valuenregs && incno >= valueno)
7094 return 0;
7095 if (goal_mem_addr_varies
7096 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7097 goal))
7098 return 0;
7099 }
7100 }
7101 #endif
7102 }
7103 }
7104 }
7105 \f
7106 /* Find a place where INCED appears in an increment or decrement operator
7107 within X, and return the amount INCED is incremented or decremented by.
7108 The value is always positive. */
7109
7110 static int
7111 find_inc_amount (rtx x, rtx inced)
7112 {
7113 enum rtx_code code = GET_CODE (x);
7114 const char *fmt;
7115 int i;
7116
7117 if (code == MEM)
7118 {
7119 rtx addr = XEXP (x, 0);
7120 if ((GET_CODE (addr) == PRE_DEC
7121 || GET_CODE (addr) == POST_DEC
7122 || GET_CODE (addr) == PRE_INC
7123 || GET_CODE (addr) == POST_INC)
7124 && XEXP (addr, 0) == inced)
7125 return GET_MODE_SIZE (GET_MODE (x));
7126 else if ((GET_CODE (addr) == PRE_MODIFY
7127 || GET_CODE (addr) == POST_MODIFY)
7128 && GET_CODE (XEXP (addr, 1)) == PLUS
7129 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7130 && XEXP (addr, 0) == inced
7131 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7132 {
7133 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7134 return i < 0 ? -i : i;
7135 }
7136 }
7137
7138 fmt = GET_RTX_FORMAT (code);
7139 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7140 {
7141 if (fmt[i] == 'e')
7142 {
7143 int tem = find_inc_amount (XEXP (x, i), inced);
7144 if (tem != 0)
7145 return tem;
7146 }
7147 if (fmt[i] == 'E')
7148 {
7149 int j;
7150 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7151 {
7152 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7153 if (tem != 0)
7154 return tem;
7155 }
7156 }
7157 }
7158
7159 return 0;
7160 }
7161 \f
7162 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7163 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7164
7165 static int
7166 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7167 rtx insn)
7168 {
7169 rtx link;
7170
7171 if (!AUTO_INC_DEC)
7172 return 0;
7173
7174 gcc_assert (insn);
7175
7176 if (! INSN_P (insn))
7177 return 0;
7178
7179 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7180 if (REG_NOTE_KIND (link) == REG_INC)
7181 {
7182 unsigned int test = (int) REGNO (XEXP (link, 0));
7183 if (test >= regno && test < endregno)
7184 return 1;
7185 }
7186 return 0;
7187 }
7188
7189 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7190 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7191 REG_INC. REGNO must refer to a hard register. */
7192
7193 int
7194 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7195 int sets)
7196 {
7197 unsigned int nregs, endregno;
7198
7199 /* regno must be a hard register. */
7200 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7201
7202 nregs = hard_regno_nregs[regno][mode];
7203 endregno = regno + nregs;
7204
7205 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7206 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7207 && REG_P (XEXP (PATTERN (insn), 0)))
7208 {
7209 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7210
7211 return test >= regno && test < endregno;
7212 }
7213
7214 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7215 return 1;
7216
7217 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7218 {
7219 int i = XVECLEN (PATTERN (insn), 0) - 1;
7220
7221 for (; i >= 0; i--)
7222 {
7223 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7224 if ((GET_CODE (elt) == CLOBBER
7225 || (sets == 1 && GET_CODE (elt) == SET))
7226 && REG_P (XEXP (elt, 0)))
7227 {
7228 unsigned int test = REGNO (XEXP (elt, 0));
7229
7230 if (test >= regno && test < endregno)
7231 return 1;
7232 }
7233 if (sets == 2
7234 && reg_inc_found_and_valid_p (regno, endregno, elt))
7235 return 1;
7236 }
7237 }
7238
7239 return 0;
7240 }
7241
7242 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7243 rtx
7244 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7245 {
7246 int regno;
7247
7248 if (GET_MODE (reloadreg) == mode)
7249 return reloadreg;
7250
7251 regno = REGNO (reloadreg);
7252
7253 if (REG_WORDS_BIG_ENDIAN)
7254 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7255 - (int) hard_regno_nregs[regno][mode];
7256
7257 return gen_rtx_REG (mode, regno);
7258 }
7259
7260 static const char *const reload_when_needed_name[] =
7261 {
7262 "RELOAD_FOR_INPUT",
7263 "RELOAD_FOR_OUTPUT",
7264 "RELOAD_FOR_INSN",
7265 "RELOAD_FOR_INPUT_ADDRESS",
7266 "RELOAD_FOR_INPADDR_ADDRESS",
7267 "RELOAD_FOR_OUTPUT_ADDRESS",
7268 "RELOAD_FOR_OUTADDR_ADDRESS",
7269 "RELOAD_FOR_OPERAND_ADDRESS",
7270 "RELOAD_FOR_OPADDR_ADDR",
7271 "RELOAD_OTHER",
7272 "RELOAD_FOR_OTHER_ADDRESS"
7273 };
7274
7275 /* These functions are used to print the variables set by 'find_reloads' */
7276
7277 DEBUG_FUNCTION void
7278 debug_reload_to_stream (FILE *f)
7279 {
7280 int r;
7281 const char *prefix;
7282
7283 if (! f)
7284 f = stderr;
7285 for (r = 0; r < n_reloads; r++)
7286 {
7287 fprintf (f, "Reload %d: ", r);
7288
7289 if (rld[r].in != 0)
7290 {
7291 fprintf (f, "reload_in (%s) = ",
7292 GET_MODE_NAME (rld[r].inmode));
7293 print_inline_rtx (f, rld[r].in, 24);
7294 fprintf (f, "\n\t");
7295 }
7296
7297 if (rld[r].out != 0)
7298 {
7299 fprintf (f, "reload_out (%s) = ",
7300 GET_MODE_NAME (rld[r].outmode));
7301 print_inline_rtx (f, rld[r].out, 24);
7302 fprintf (f, "\n\t");
7303 }
7304
7305 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7306
7307 fprintf (f, "%s (opnum = %d)",
7308 reload_when_needed_name[(int) rld[r].when_needed],
7309 rld[r].opnum);
7310
7311 if (rld[r].optional)
7312 fprintf (f, ", optional");
7313
7314 if (rld[r].nongroup)
7315 fprintf (f, ", nongroup");
7316
7317 if (rld[r].inc != 0)
7318 fprintf (f, ", inc by %d", rld[r].inc);
7319
7320 if (rld[r].nocombine)
7321 fprintf (f, ", can't combine");
7322
7323 if (rld[r].secondary_p)
7324 fprintf (f, ", secondary_reload_p");
7325
7326 if (rld[r].in_reg != 0)
7327 {
7328 fprintf (f, "\n\treload_in_reg: ");
7329 print_inline_rtx (f, rld[r].in_reg, 24);
7330 }
7331
7332 if (rld[r].out_reg != 0)
7333 {
7334 fprintf (f, "\n\treload_out_reg: ");
7335 print_inline_rtx (f, rld[r].out_reg, 24);
7336 }
7337
7338 if (rld[r].reg_rtx != 0)
7339 {
7340 fprintf (f, "\n\treload_reg_rtx: ");
7341 print_inline_rtx (f, rld[r].reg_rtx, 24);
7342 }
7343
7344 prefix = "\n\t";
7345 if (rld[r].secondary_in_reload != -1)
7346 {
7347 fprintf (f, "%ssecondary_in_reload = %d",
7348 prefix, rld[r].secondary_in_reload);
7349 prefix = ", ";
7350 }
7351
7352 if (rld[r].secondary_out_reload != -1)
7353 fprintf (f, "%ssecondary_out_reload = %d\n",
7354 prefix, rld[r].secondary_out_reload);
7355
7356 prefix = "\n\t";
7357 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7358 {
7359 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7360 insn_data[rld[r].secondary_in_icode].name);
7361 prefix = ", ";
7362 }
7363
7364 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7365 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7366 insn_data[rld[r].secondary_out_icode].name);
7367
7368 fprintf (f, "\n");
7369 }
7370 }
7371
7372 DEBUG_FUNCTION void
7373 debug_reload (void)
7374 {
7375 debug_reload_to_stream (stderr);
7376 }