1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains subroutines used only from the file reload1.c.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
26
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
29
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33      2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
36
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
43
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
52
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
55
56 NOTE SIDE EFFECTS:
57
58 find_reloads can alter the operands of the instruction it is called on.
59
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
64
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
67
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
71
72 Using a reload register for several reloads in one insn:
73
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
77
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
81
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
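/* As a rough, schematic sketch of that sequence from the caller's side
   (the real driver lives in reload1.c; register choice and insn emission
   are elided here):

       find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
       for (r = 0; r < n_reloads; r++)
         ... pick a hard reg for reload R unless rld[r].reg_rtx is already
             set, emitting the needed load insns before INSN and store
             insns after it ...
       subst_reloads (insn);  */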
85
86 #define REG_OK_STRICT
87
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
90
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "tm_p.h"
100 #include "optabs.h"
101 #include "regs.h"
102 #include "ira.h"
103 #include "recog.h"
104 #include "rtl-error.h"
105 #include "reload.h"
106 #include "addresses.h"
107 #include "params.h"
108
109 /* True if X is a constant that can be forced into the constant pool.
110 MODE is the mode of the operand, or VOIDmode if not known. */
111 #define CONST_POOL_OK_P(MODE, X) \
112 ((MODE) != VOIDmode \
113 && CONSTANT_P (X) \
114 && GET_CODE (X) != HIGH \
115 && !targetm.cannot_force_const_mem (MODE, X))
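/* For example, a CONST_INT or SYMBOL_REF operand whose mode is known
   normally satisfies this, while a HIGH expression, or a constant the
   target refuses to put in memory via targetm.cannot_force_const_mem,
   does not. */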
116
117 /* True if RCLASS is a non-empty register class that has too few registers
118 to be safely used as a reload target class. */
119
120 static inline bool
121 small_register_class_p (reg_class_t rclass)
122 {
123 return (reg_class_size [(int) rclass] == 1
124 || (reg_class_size [(int) rclass] >= 1
125 && targetm.class_likely_spilled_p (rclass)));
126 }
127
128 \f
129 /* All reloads of the current insn are recorded here. See reload.h for
130 comments. */
131 int n_reloads;
132 struct reload rld[MAX_RELOADS];
133
134 /* All the "earlyclobber" operands of the current insn
135 are recorded here. */
136 int n_earlyclobbers;
137 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
138
139 int reload_n_operands;
140
141 /* Replacing reloads.
142
143 If `replace_reloads' is nonzero, then as each reload is recorded
144 an entry is made for it in the table `replacements'.
145 Then later `subst_reloads' can look through that table and
146 perform all the replacements needed. */
147
148 /* Nonzero means record the places to replace. */
149 static int replace_reloads;
150
151 /* Each replacement is recorded with a structure like this. */
152 struct replacement
153 {
154 rtx *where; /* Location to store in */
155 int what; /* which reload this is for */
156 machine_mode mode; /* mode it must have */
157 };
158
159 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
160
161 /* Number of replacements currently recorded. */
162 static int n_replacements;
163
164 /* Used to track what is modified by an operand. */
165 struct decomposition
166 {
167 int reg_flag; /* Nonzero if referencing a register. */
168 int safe; /* Nonzero if this can't conflict with anything. */
169 rtx base; /* Base address for MEM. */
170 HOST_WIDE_INT start; /* Starting offset or register number. */
171 HOST_WIDE_INT end; /* Ending offset or register number. */
172 };
173
174 #ifdef SECONDARY_MEMORY_NEEDED
175
176 /* Save MEMs needed to copy from one class of registers to another. One MEM
177 is used per mode, but normally only one or two modes are ever used.
178
179    We keep two versions, before and after register elimination.  The one
180    after register elimination is recorded separately for each operand.  This
181    is done in case the address is not valid, to be sure that we reload
182    each one separately.  */
183
184 static rtx secondary_memlocs[NUM_MACHINE_MODES];
185 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
186 static int secondary_memlocs_elim_used = 0;
187 #endif
188
189 /* The instruction we are doing reloads for;
190 so we can test whether a register dies in it. */
191 static rtx_insn *this_insn;
192
193 /* Nonzero if this instruction is a user-specified asm with operands. */
194 static int this_insn_is_asm;
195
196 /* If hard_regs_live_known is nonzero,
197 we can tell which hard regs are currently live,
198 at least enough to succeed in choosing dummy reloads. */
199 static int hard_regs_live_known;
200
201 /* Indexed by hard reg number,
202 element is nonnegative if hard reg has been spilled.
203 This vector is passed to `find_reloads' as an argument
204 and is not changed here. */
205 static short *static_reload_reg_p;
206
207 /* Set to 1 in subst_reg_equivs if it changes anything. */
208 static int subst_reg_equivs_changed;
209
210 /* On return from push_reload, holds the reload-number for the OUT
211    operand, which can be different from that for the input operand.  */
212 static int output_reloadnum;
213
214 /* Compare two RTX's. */
215 #define MATCHES(x, y) \
216 (x == y || (x != 0 && (REG_P (x) \
217 ? REG_P (y) && REGNO (x) == REGNO (y) \
218 : rtx_equal_p (x, y) && ! side_effects_p (x))))
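/* Note that for REGs only the register numbers are compared, so e.g.
   (reg:SI 3) matches (reg:QI 3); any other rtx's must be rtx_equal_p
   and free of side effects to match. */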
219
220 /* Indicates if two reload purposes are for similar enough things that we
221 can merge their reloads. */
222 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
223 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
224 || ((when1) == (when2) && (op1) == (op2)) \
225 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
226 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
227 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
228 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
229 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
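/* For example, two RELOAD_FOR_INPUT reloads are mergeable regardless of
   their operand numbers, whereas two RELOAD_FOR_INPUT_ADDRESS reloads
   merge only when they are for the same operand. */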
230
231 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
232 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
233 ((when1) != (when2) \
234 || ! ((op1) == (op2) \
235 || (when1) == RELOAD_FOR_INPUT \
236 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
237 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
238
239 /* If we are going to reload an address, compute the reload type to
240 use. */
241 #define ADDR_TYPE(type) \
242 ((type) == RELOAD_FOR_INPUT_ADDRESS \
243 ? RELOAD_FOR_INPADDR_ADDRESS \
244 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
245 ? RELOAD_FOR_OUTADDR_ADDRESS \
246 : (type)))
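/* E.g. a reload needed for the address of a RELOAD_FOR_INPUT_ADDRESS
   reload becomes RELOAD_FOR_INPADDR_ADDRESS; other types are used
   unchanged. */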
247
248 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
249 machine_mode, enum reload_type,
250 enum insn_code *, secondary_reload_info *);
251 static enum reg_class find_valid_class (machine_mode, machine_mode,
252 int, unsigned int);
253 static void push_replacement (rtx *, int, machine_mode);
254 static void dup_replacements (rtx *, rtx *);
255 static void combine_reloads (void);
256 static int find_reusable_reload (rtx *, rtx, enum reg_class,
257 enum reload_type, int, int);
258 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
259 machine_mode, reg_class_t, int, int);
260 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
261 static struct decomposition decompose (rtx);
262 static int immune_p (rtx, rtx, struct decomposition);
263 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
264 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
265 rtx_insn *, int *);
266 static rtx make_memloc (rtx, int);
267 static int maybe_memory_address_addr_space_p (machine_mode, rtx,
268 addr_space_t, rtx *);
269 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
270 int, enum reload_type, int, rtx_insn *);
271 static rtx subst_reg_equivs (rtx, rtx_insn *);
272 static rtx subst_indexed_address (rtx);
273 static void update_auto_inc_notes (rtx_insn *, int, int);
274 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
275 enum rtx_code, enum rtx_code, rtx *,
276 int, enum reload_type,int, rtx_insn *);
277 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
278 machine_mode, int,
279 enum reload_type, int);
280 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
281 int, rtx_insn *, int *);
282 static void copy_replacements_1 (rtx *, rtx *, int);
283 static int find_inc_amount (rtx, rtx);
284 static int refers_to_mem_for_reload_p (rtx);
285 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
286 rtx, rtx *);
287
288 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
289 list yet. */
290
291 static void
292 push_reg_equiv_alt_mem (int regno, rtx mem)
293 {
294 rtx it;
295
296 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
297 if (rtx_equal_p (XEXP (it, 0), mem))
298 return;
299
300 reg_equiv_alt_mem_list (regno)
301 = alloc_EXPR_LIST (REG_EQUIV, mem,
302 reg_equiv_alt_mem_list (regno));
303 }
304 \f
305 /* Determine if any secondary reloads are needed for loading (if IN_P is
306 nonzero) or storing (if IN_P is zero) X to or from a reload register of
307 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
308 are needed, push them.
309
310 Return the reload number of the secondary reload we made, or -1 if
311 we didn't need one. *PICODE is set to the insn_code to use if we do
312 need a secondary reload. */
313
314 static int
315 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
316 enum reg_class reload_class,
317 machine_mode reload_mode, enum reload_type type,
318 enum insn_code *picode, secondary_reload_info *prev_sri)
319 {
320 enum reg_class rclass = NO_REGS;
321 enum reg_class scratch_class;
322 machine_mode mode = reload_mode;
323 enum insn_code icode = CODE_FOR_nothing;
324 enum insn_code t_icode = CODE_FOR_nothing;
325 enum reload_type secondary_type;
326 int s_reload, t_reload = -1;
327 const char *scratch_constraint;
328 secondary_reload_info sri;
329
330 if (type == RELOAD_FOR_INPUT_ADDRESS
331 || type == RELOAD_FOR_OUTPUT_ADDRESS
332 || type == RELOAD_FOR_INPADDR_ADDRESS
333 || type == RELOAD_FOR_OUTADDR_ADDRESS)
334 secondary_type = type;
335 else
336 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
337
338 *picode = CODE_FOR_nothing;
339
340 /* If X is a paradoxical SUBREG, use the inner value to determine both the
341 mode and object being reloaded. */
342 if (paradoxical_subreg_p (x))
343 {
344 x = SUBREG_REG (x);
345 reload_mode = GET_MODE (x);
346 }
347
348 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
349 is still a pseudo-register by now, it *must* have an equivalent MEM
350 but we don't want to assume that), use that equivalent when seeing if
351 a secondary reload is needed since whether or not a reload is needed
352 might be sensitive to the form of the MEM. */
353
354 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
355 && reg_equiv_mem (REGNO (x)))
356 x = reg_equiv_mem (REGNO (x));
357
358 sri.icode = CODE_FOR_nothing;
359 sri.prev_sri = prev_sri;
360 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
361 reload_mode, &sri);
362 icode = (enum insn_code) sri.icode;
363
364 /* If we don't need any secondary registers, done. */
365 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
366 return -1;
367
368 if (rclass != NO_REGS)
369 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
370 reload_mode, type, &t_icode, &sri);
371
372 /* If we will be using an insn, the secondary reload is for a
373 scratch register. */
374
375 if (icode != CODE_FOR_nothing)
376 {
377 /* If IN_P is nonzero, the reload register will be the output in
378 operand 0. If IN_P is zero, the reload register will be the input
379 in operand 1. Outputs should have an initial "=", which we must
380 skip. */
381
382 /* ??? It would be useful to be able to handle only two, or more than
383 three, operands, but for now we can only handle the case of having
384 exactly three: output, input and one temp/scratch. */
385 gcc_assert (insn_data[(int) icode].n_operands == 3);
386
387 /* ??? We currently have no way to represent a reload that needs
388 an icode to reload from an intermediate tertiary reload register.
389 We should probably have a new field in struct reload to tag a
390 chain of scratch operand reloads onto. */
391 gcc_assert (rclass == NO_REGS);
392
393 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
394 gcc_assert (*scratch_constraint == '=');
395 scratch_constraint++;
396 if (*scratch_constraint == '&')
397 scratch_constraint++;
398 scratch_class = (reg_class_for_constraint
399 (lookup_constraint (scratch_constraint)));
400
401 rclass = scratch_class;
402 mode = insn_data[(int) icode].operand[2].mode;
403 }
404
405 /* This case isn't valid, so fail. Reload is allowed to use the same
406 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
407 in the case of a secondary register, we actually need two different
408 registers for correct code. We fail here to prevent the possibility of
409 silently generating incorrect code later.
410
411 The convention is that secondary input reloads are valid only if the
412 secondary_class is different from class. If you have such a case, you
413      cannot use secondary reloads; you must work around the problem some
414 other way.
415
416 Allow this when a reload_in/out pattern is being used. I.e. assume
417 that the generated code handles this case. */
418
419 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
420 || t_icode != CODE_FOR_nothing);
421
422 /* See if we can reuse an existing secondary reload. */
423 for (s_reload = 0; s_reload < n_reloads; s_reload++)
424 if (rld[s_reload].secondary_p
425 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
426 || reg_class_subset_p (rld[s_reload].rclass, rclass))
427 && ((in_p && rld[s_reload].inmode == mode)
428 || (! in_p && rld[s_reload].outmode == mode))
429 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
430 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
431 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
432 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
433 && (small_register_class_p (rclass)
434 || targetm.small_register_classes_for_mode_p (VOIDmode))
435 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
436 opnum, rld[s_reload].opnum))
437 {
438 if (in_p)
439 rld[s_reload].inmode = mode;
440 if (! in_p)
441 rld[s_reload].outmode = mode;
442
443 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
444 rld[s_reload].rclass = rclass;
445
446 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
447 rld[s_reload].optional &= optional;
448 rld[s_reload].secondary_p = 1;
449 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
450 opnum, rld[s_reload].opnum))
451 rld[s_reload].when_needed = RELOAD_OTHER;
452
453 break;
454 }
455
456 if (s_reload == n_reloads)
457 {
458 #ifdef SECONDARY_MEMORY_NEEDED
459 /* If we need a memory location to copy between the two reload regs,
460 set it up now. Note that we do the input case before making
461 the reload and the output case after. This is due to the
462 way reloads are output. */
463
464 if (in_p && icode == CODE_FOR_nothing
465 && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode))
466 {
467 get_secondary_mem (x, reload_mode, opnum, type);
468
469 /* We may have just added new reloads. Make sure we add
470 the new reload at the end. */
471 s_reload = n_reloads;
472 }
473 #endif
474
475 /* We need to make a new secondary reload for this register class. */
476 rld[s_reload].in = rld[s_reload].out = 0;
477 rld[s_reload].rclass = rclass;
478
479 rld[s_reload].inmode = in_p ? mode : VOIDmode;
480 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
481 rld[s_reload].reg_rtx = 0;
482 rld[s_reload].optional = optional;
483 rld[s_reload].inc = 0;
484 /* Maybe we could combine these, but it seems too tricky. */
485 rld[s_reload].nocombine = 1;
486 rld[s_reload].in_reg = 0;
487 rld[s_reload].out_reg = 0;
488 rld[s_reload].opnum = opnum;
489 rld[s_reload].when_needed = secondary_type;
490 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
491 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
492 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
493 rld[s_reload].secondary_out_icode
494 = ! in_p ? t_icode : CODE_FOR_nothing;
495 rld[s_reload].secondary_p = 1;
496
497 n_reloads++;
498
499 #ifdef SECONDARY_MEMORY_NEEDED
500 if (! in_p && icode == CODE_FOR_nothing
501 && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode))
502 get_secondary_mem (x, mode, opnum, type);
503 #endif
504 }
505
506 *picode = icode;
507 return s_reload;
508 }
509
510 /* If a secondary reload is needed, return its class. If both an intermediate
511    register and a scratch register are needed, we return the class of the
512 intermediate register. */
513 reg_class_t
514 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
515 rtx x)
516 {
517 enum insn_code icode;
518 secondary_reload_info sri;
519
520 sri.icode = CODE_FOR_nothing;
521 sri.prev_sri = NULL;
522 rclass
523 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
524 icode = (enum insn_code) sri.icode;
525
526 /* If there are no secondary reloads at all, we return NO_REGS.
527 If an intermediate register is needed, we return its class. */
528 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
529 return rclass;
530
531 /* No intermediate register is needed, but we have a special reload
532 pattern, which we assume for now needs a scratch register. */
533 return scratch_reload_class (icode);
534 }
535
536 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
537 three operands, verify that operand 2 is an output operand, and return
538 its register class.
539 ??? We'd like to be able to handle any pattern with at least 2 operands,
540 for zero or more scratch registers, but that needs more infrastructure. */
541 enum reg_class
542 scratch_reload_class (enum insn_code icode)
543 {
544 const char *scratch_constraint;
545 enum reg_class rclass;
546
547 gcc_assert (insn_data[(int) icode].n_operands == 3);
548 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
549 gcc_assert (*scratch_constraint == '=');
550 scratch_constraint++;
551 if (*scratch_constraint == '&')
552 scratch_constraint++;
553 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
554 gcc_assert (rclass != NO_REGS);
555 return rclass;
556 }
557 \f
558 #ifdef SECONDARY_MEMORY_NEEDED
559
560 /* Return a memory location that will be used to copy X in mode MODE.
561 If we haven't already made a location for this mode in this insn,
562 call find_reloads_address on the location being returned. */
563
564 rtx
565 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
566 int opnum, enum reload_type type)
567 {
568 rtx loc;
569 int mem_valid;
570
571 /* By default, if MODE is narrower than a word, widen it to a word.
572 This is required because most machines that require these memory
573      locations do not support short loads and stores from all registers
574 (e.g., FP registers). */
575
576 #ifdef SECONDARY_MEMORY_NEEDED_MODE
577 mode = SECONDARY_MEMORY_NEEDED_MODE (mode);
578 #else
579 if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode))
580 mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0);
581 #endif
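  /* E.g. on a 32-bit-word target without SECONDARY_MEMORY_NEEDED_MODE,
     a QImode or HImode copy gets an SImode memory location here.  */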
582
583 /* If we already have made a MEM for this operand in MODE, return it. */
584 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
585 return secondary_memlocs_elim[(int) mode][opnum];
586
587 /* If this is the first time we've tried to get a MEM for this mode,
588 allocate a new one. `something_changed' in reload will get set
589 by noticing that the frame size has changed. */
590
591 if (secondary_memlocs[(int) mode] == 0)
592 {
593 #ifdef SECONDARY_MEMORY_NEEDED_RTX
594 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
595 #else
596 secondary_memlocs[(int) mode]
597 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
598 #endif
599 }
600
601 /* Get a version of the address doing any eliminations needed. If that
602 didn't give us a new MEM, make a new one if it isn't valid. */
603
604 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
605 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
606 MEM_ADDR_SPACE (loc));
607
608 if (! mem_valid && loc == secondary_memlocs[(int) mode])
609 loc = copy_rtx (loc);
610
611 /* The only time the call below will do anything is if the stack
612 offset is too large. In that case IND_LEVELS doesn't matter, so we
613 can just pass a zero. Adjust the type to be the address of the
614 corresponding object. If the address was valid, save the eliminated
615 address. If it wasn't valid, we need to make a reload each time, so
616 don't save it. */
617
618 if (! mem_valid)
619 {
620 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
621 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
622 : RELOAD_OTHER);
623
624 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
625 opnum, type, 0, 0);
626 }
627
628 secondary_memlocs_elim[(int) mode][opnum] = loc;
629 if (secondary_memlocs_elim_used <= (int)mode)
630 secondary_memlocs_elim_used = (int)mode + 1;
631 return loc;
632 }
633
634 /* Clear any secondary memory locations we've made. */
635
636 void
637 clear_secondary_mem (void)
638 {
639 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
640 }
641 #endif /* SECONDARY_MEMORY_NEEDED */
642 \f
643
644 /* Find the largest class which has at least one register valid in
645    mode INNER, and for which, for every such register, that register number
646    plus N is also valid in OUTER (if in range) and is cheap to move
647    into DEST_REGNO.  Such a class must exist.  */
648
649 static enum reg_class
650 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
651 machine_mode inner ATTRIBUTE_UNUSED, int n,
652 unsigned int dest_regno ATTRIBUTE_UNUSED)
653 {
654 int best_cost = -1;
655 int rclass;
656 int regno;
657 enum reg_class best_class = NO_REGS;
658 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
659 unsigned int best_size = 0;
660 int cost;
661
662 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
663 {
664 int bad = 0;
665 int good = 0;
666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
667 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
668 {
669 if (HARD_REGNO_MODE_OK (regno, inner))
670 {
671 good = 1;
672 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
673 && ! HARD_REGNO_MODE_OK (regno + n, outer))
674 bad = 1;
675 }
676 }
677
678 if (bad || !good)
679 continue;
680 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
681
682 if ((reg_class_size[rclass] > best_size
683 && (best_cost < 0 || best_cost >= cost))
684 || best_cost > cost)
685 {
686 best_class = (enum reg_class) rclass;
687 best_size = reg_class_size[rclass];
688 best_cost = register_move_cost (outer, (enum reg_class) rclass,
689 dest_class);
690 }
691 }
692
693 gcc_assert (best_size != 0);
694
695 return best_class;
696 }
697
698 /* We are trying to reload a subreg of something that is not a register.
699 Find the largest class which contains only registers valid in
700    mode MODE.  OUTER is the mode of the subreg, DEST_CLASS is the class in
701 which we would eventually like to obtain the object. */
702
703 static enum reg_class
704 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
705 machine_mode mode ATTRIBUTE_UNUSED,
706 enum reg_class dest_class ATTRIBUTE_UNUSED)
707 {
708 int best_cost = -1;
709 int rclass;
710 int regno;
711 enum reg_class best_class = NO_REGS;
712 unsigned int best_size = 0;
713 int cost;
714
715 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
716 {
717 int bad = 0;
718 for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++)
719 {
720 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
721 && !HARD_REGNO_MODE_OK (regno, mode))
722 bad = 1;
723 }
724
725 if (bad)
726 continue;
727
728 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
729
730 if ((reg_class_size[rclass] > best_size
731 && (best_cost < 0 || best_cost >= cost))
732 || best_cost > cost)
733 {
734 best_class = (enum reg_class) rclass;
735 best_size = reg_class_size[rclass];
736 best_cost = register_move_cost (outer, (enum reg_class) rclass,
737 dest_class);
738 }
739 }
740
741 gcc_assert (best_size != 0);
742
743 #ifdef LIMIT_RELOAD_CLASS
744 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
745 #endif
746 return best_class;
747 }
748 \f
749 /* Return the number of a previously made reload that can be combined with
750 a new one, or n_reloads if none of the existing reloads can be used.
751 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
752    push_reload; they determine the kind of the new reload that we try to
753 combine. P_IN points to the corresponding value of IN, which can be
754 modified by this function.
755 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
756
757 static int
758 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
759 enum reload_type type, int opnum, int dont_share)
760 {
761 rtx in = *p_in;
762 int i;
763 /* We can't merge two reloads if the output of either one is
764 earlyclobbered. */
765
766 if (earlyclobber_operand_p (out))
767 return n_reloads;
768
769 /* We can use an existing reload if the class is right
770 and at least one of IN and OUT is a match
771 and the other is at worst neutral.
772 (A zero compared against anything is neutral.)
773
774 For targets with small register classes, don't use existing reloads
775 unless they are for the same thing since that can cause us to need
776 more reload registers than we otherwise would. */
777
778 for (i = 0; i < n_reloads; i++)
779 if ((reg_class_subset_p (rclass, rld[i].rclass)
780 || reg_class_subset_p (rld[i].rclass, rclass))
781 /* If the existing reload has a register, it must fit our class. */
782 && (rld[i].reg_rtx == 0
783 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
784 true_regnum (rld[i].reg_rtx)))
785 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
786 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
787 || (out != 0 && MATCHES (rld[i].out, out)
788 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
789 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
790 && (small_register_class_p (rclass)
791 || targetm.small_register_classes_for_mode_p (VOIDmode))
792 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
793 return i;
794
795 /* Reloading a plain reg for input can match a reload to postincrement
796 that reg, since the postincrement's value is the right value.
797 Likewise, it can match a preincrement reload, since we regard
798 the preincrementation as happening before any ref in this insn
799 to that register. */
800 for (i = 0; i < n_reloads; i++)
801 if ((reg_class_subset_p (rclass, rld[i].rclass)
802 || reg_class_subset_p (rld[i].rclass, rclass))
803 /* If the existing reload has a register, it must fit our
804 class. */
805 && (rld[i].reg_rtx == 0
806 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
807 true_regnum (rld[i].reg_rtx)))
808 && out == 0 && rld[i].out == 0 && rld[i].in != 0
809 && ((REG_P (in)
810 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
811 && MATCHES (XEXP (rld[i].in, 0), in))
812 || (REG_P (rld[i].in)
813 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
814 && MATCHES (XEXP (in, 0), rld[i].in)))
815 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
816 && (small_register_class_p (rclass)
817 || targetm.small_register_classes_for_mode_p (VOIDmode))
818 && MERGABLE_RELOADS (type, rld[i].when_needed,
819 opnum, rld[i].opnum))
820 {
821 /* Make sure reload_in ultimately has the increment,
822 not the plain register. */
823 if (REG_P (in))
824 *p_in = rld[i].in;
825 return i;
826 }
827 return n_reloads;
828 }
829
830 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
831 expression. MODE is the mode that X will be used in. OUTPUT is true if
832 the function is invoked for the output part of an enclosing reload. */
833
834 static bool
835 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
836 {
837 rtx inner;
838
839 /* Only SUBREGs are problematical. */
840 if (GET_CODE (x) != SUBREG)
841 return false;
842
843 inner = SUBREG_REG (x);
844
845 /* If INNER is a constant or PLUS, then INNER will need reloading. */
846 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
847 return true;
848
849 /* If INNER is not a hard register, then INNER will not need reloading. */
850 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
851 return false;
852
853 /* If INNER is not ok for MODE, then INNER will need reloading. */
854 if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode))
855 return true;
856
857 /* If this is for an output, and the outer part is a word or smaller,
858 INNER is larger than a word and the number of registers in INNER is
859 not the same as the number of words in INNER, then INNER will need
860 reloading (with an in-out reload). */
861 return (output
862 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
863 && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
864 && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
865 != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)]));
866 }
867
868 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
869 requiring an extra reload register. The caller has already found that
870 IN contains some reference to REGNO, so check that we can produce the
871 new value in a single step. E.g. if we have
872 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
873 instruction that adds one to a register, this should succeed.
874 However, if we have something like
875 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
876 needs to be loaded into a register first, we need a separate reload
877 register.
878    Such PLUS reloads are generated by find_reloads_address_part.
879 The out-of-range PLUS expressions are usually introduced in the instruction
880 patterns by register elimination and substituting pseudos without a home
881 by their function-invariant equivalences. */
882 static int
883 can_reload_into (rtx in, int regno, machine_mode mode)
884 {
885 rtx dst;
886 rtx_insn *test_insn;
887 int r = 0;
888 struct recog_data_d save_recog_data;
889
890 /* For matching constraints, we often get notional input reloads where
891 we want to use the original register as the reload register. I.e.
892 technically this is a non-optional input-output reload, but IN is
893 already a valid register, and has been chosen as the reload register.
894 Speed this up, since it trivially works. */
895 if (REG_P (in))
896 return 1;
897
898 /* To test MEMs properly, we'd have to take into account all the reloads
899 that are already scheduled, which can become quite complicated.
900 And since we've already handled address reloads for this MEM, it
901 should always succeed anyway. */
902 if (MEM_P (in))
903 return 1;
904
905 /* If we can make a simple SET insn that does the job, everything should
906 be fine. */
907 dst = gen_rtx_REG (mode, regno);
908 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
909 save_recog_data = recog_data;
910 if (recog_memoized (test_insn) >= 0)
911 {
912 extract_insn (test_insn);
913 r = constrain_operands (1, get_enabled_alternatives (test_insn));
914 }
915 recog_data = save_recog_data;
916 return r;
917 }
918
919 /* Record one reload that needs to be performed.
920 IN is an rtx saying where the data are to be found before this instruction.
921 OUT says where they must be stored after the instruction.
922 (IN is zero for data not read, and OUT is zero for data not written.)
923 INLOC and OUTLOC point to the places in the instructions where
924 IN and OUT were found.
925 If IN and OUT are both nonzero, it means the same register must be used
926 to reload both IN and OUT.
927
928 RCLASS is a register class required for the reloaded data.
929 INMODE is the machine mode that the instruction requires
930 for the reg that replaces IN and OUTMODE is likewise for OUT.
931
932 If IN is zero, then OUT's location and mode should be passed as
933 INLOC and INMODE.
934
935    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
936
937 OPTIONAL nonzero means this reload does not need to be performed:
938 it can be discarded if that is more convenient.
939
940 OPNUM and TYPE say what the purpose of this reload is.
941
942 The return value is the reload-number for this reload.
943
944 If both IN and OUT are nonzero, in some rare cases we might
945 want to make two separate reloads. (Actually we never do this now.)
946 Therefore, the reload-number for OUT is stored in
947 output_reloadnum when we return; the return value applies to IN.
948 Usually (presently always), when IN and OUT are nonzero,
949 the two reload-numbers are equal, but the caller should be careful to
950 distinguish them. */
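/* As a schematic example (argument values are for exposition only): a
   reload of input operand 1, which must end up in a general register,
   could be pushed as

       push_reload (recog_data.operand[1], NULL_RTX,
                    &recog_data.operand[1], (rtx *) 0,
                    GENERAL_REGS, GET_MODE (recog_data.operand[1]), VOIDmode,
                    0, 0, 1, RELOAD_FOR_INPUT);  */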
951
952 int
953 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
954 enum reg_class rclass, machine_mode inmode,
955 machine_mode outmode, int strict_low, int optional,
956 int opnum, enum reload_type type)
957 {
958 int i;
959 int dont_share = 0;
960 int dont_remove_subreg = 0;
961 #ifdef LIMIT_RELOAD_CLASS
962 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
963 #endif
964 int secondary_in_reload = -1, secondary_out_reload = -1;
965 enum insn_code secondary_in_icode = CODE_FOR_nothing;
966 enum insn_code secondary_out_icode = CODE_FOR_nothing;
967 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
968 subreg_in_class = NO_REGS;
969
970 /* INMODE and/or OUTMODE could be VOIDmode if no mode
971 has been specified for the operand. In that case,
972 use the operand's mode as the mode to reload. */
973 if (inmode == VOIDmode && in != 0)
974 inmode = GET_MODE (in);
975 if (outmode == VOIDmode && out != 0)
976 outmode = GET_MODE (out);
977
978   /* If find_reloads and friends have so far failed to replace a pseudo
979      with a constant from reg_equiv_constant, something went wrong
980      beforehand.
981 Note that it can't simply be done here if we missed it earlier
982 since the constant might need to be pushed into the literal pool
983 and the resulting memref would probably need further
984 reloading. */
985 if (in != 0 && REG_P (in))
986 {
987 int regno = REGNO (in);
988
989 gcc_assert (regno < FIRST_PSEUDO_REGISTER
990 || reg_renumber[regno] >= 0
991 || reg_equiv_constant (regno) == NULL_RTX);
992 }
993
994 /* reg_equiv_constant only contains constants which are obviously
995      not appropriate as a destination.  So if we would need to replace
996      the destination pseudo with a constant, we are in real
997 trouble. */
998 if (out != 0 && REG_P (out))
999 {
1000 int regno = REGNO (out);
1001
1002 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1003 || reg_renumber[regno] >= 0
1004 || reg_equiv_constant (regno) == NULL_RTX);
1005 }
1006
1007 /* If we have a read-write operand with an address side-effect,
1008 change either IN or OUT so the side-effect happens only once. */
1009 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1010 switch (GET_CODE (XEXP (in, 0)))
1011 {
1012 case POST_INC: case POST_DEC: case POST_MODIFY:
1013 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1014 break;
1015
1016 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1017 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1018 break;
1019
1020 default:
1021 break;
1022 }
1023
1024 /* If we are reloading a (SUBREG constant ...), really reload just the
1025 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1026 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1027 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1028 register is a pseudo, also reload the inside expression.
1029 For machines that extend byte loads, do this for any SUBREG of a pseudo
1030 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1031 M2 is an integral mode that gets extended when loaded.
1032 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1033 where either M1 is not valid for R or M2 is wider than a word but we
1034 only need one register to store an M2-sized quantity in R.
1035 (However, if OUT is nonzero, we need to reload the reg *and*
1036      the subreg, so do nothing here, and let the following statement handle it.)
1037
1038 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1039 we can't handle it here because CONST_INT does not indicate a mode.
1040
1041 Similarly, we must reload the inside expression if we have a
1042 STRICT_LOW_PART (presumably, in == out in this case).
1043
1044 Also reload the inner expression if it does not require a secondary
1045 reload but the SUBREG does.
1046
1047 Finally, reload the inner expression if it is a register that is in
1048 the class whose registers cannot be referenced in a different size
1049 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1050 cannot reload just the inside since we might end up with the wrong
1051 register class. But if it is inside a STRICT_LOW_PART, we have
1052 no choice, so we hope we do get the right register class there. */
1053
1054 if (in != 0 && GET_CODE (in) == SUBREG
1055 && (subreg_lowpart_p (in) || strict_low)
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass)
1058 #endif
1059 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))]
1060 && (CONSTANT_P (SUBREG_REG (in))
1061 || GET_CODE (SUBREG_REG (in)) == PLUS
1062 || strict_low
1063 || (((REG_P (SUBREG_REG (in))
1064 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1065 || MEM_P (SUBREG_REG (in)))
1066 && ((GET_MODE_PRECISION (inmode)
1067 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1068 #ifdef LOAD_EXTEND_OP
1069 || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1070 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1071 <= UNITS_PER_WORD)
1072 && (GET_MODE_PRECISION (inmode)
1073 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1074 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in)))
1075 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN)
1076 #endif
1077 #if WORD_REGISTER_OPERATIONS
1078 || ((GET_MODE_PRECISION (inmode)
1079 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in))))
1080 && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
1081 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
1082 / UNITS_PER_WORD)))
1083 #endif
1084 ))
1085 || (REG_P (SUBREG_REG (in))
1086 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1087 /* The case where out is nonzero
1088 is handled differently in the following statement. */
1089 && (out == 0 || subreg_lowpart_p (in))
1090 && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
1091 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1092 > UNITS_PER_WORD)
1093 && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1094 / UNITS_PER_WORD)
1095 != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))]
1096 [GET_MODE (SUBREG_REG (in))]))
1097 || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode)))
1098 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1099 && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
1100 SUBREG_REG (in))
1101 == NO_REGS))
1102 #ifdef CANNOT_CHANGE_MODE_CLASS
1103 || (REG_P (SUBREG_REG (in))
1104 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1105 && REG_CANNOT_CHANGE_MODE_P
1106 (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode))
1107 #endif
1108 ))
1109 {
1110 #ifdef LIMIT_RELOAD_CLASS
1111 in_subreg_loc = inloc;
1112 #endif
1113 inloc = &SUBREG_REG (in);
1114 in = *inloc;
1115 #if ! defined (LOAD_EXTEND_OP)
1116 if (!WORD_REGISTER_OPERATIONS
1117 && MEM_P (in))
1118 /* This is supposed to happen only for paradoxical subregs made by
1119 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
1120 gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));
1121 #endif
1122 inmode = GET_MODE (in);
1123 }
1124
1125 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1126 where M1 is not valid for R if it was not handled by the code above.
1127
1128 Similar issue for (SUBREG constant ...) if it was not handled by the
1129 code above. This can happen if SUBREG_BYTE != 0.
1130
1131 However, we must reload the inner reg *as well as* the subreg in
1132 that case. */
1133
1134 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1135 {
1136 if (REG_P (SUBREG_REG (in)))
1137 subreg_in_class
1138 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1139 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1140 GET_MODE (SUBREG_REG (in)),
1141 SUBREG_BYTE (in),
1142 GET_MODE (in)),
1143 REGNO (SUBREG_REG (in)));
1144 else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF)
1145 subreg_in_class = find_valid_class_1 (inmode,
1146 GET_MODE (SUBREG_REG (in)),
1147 rclass);
1148
1149 /* This relies on the fact that emit_reload_insns outputs the
1150 instructions for input reloads of type RELOAD_OTHER in the same
1151 order as the reloads. Thus if the outer reload is also of type
1152 RELOAD_OTHER, we are guaranteed that this inner reload will be
1153 output before the outer reload. */
1154 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1155 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1156 dont_remove_subreg = 1;
1157 }
1158
1159 /* Similarly for paradoxical and problematical SUBREGs on the output.
1160 Note that there is no reason we need worry about the previous value
1161 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1162 entitled to clobber it all (except in the case of a word mode subreg
1163      or of a STRICT_LOW_PART; in the latter case the constraint should
1164 label it input-output.) */
1165 if (out != 0 && GET_CODE (out) == SUBREG
1166 && (subreg_lowpart_p (out) || strict_low)
1167 #ifdef CANNOT_CHANGE_MODE_CLASS
1168 && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass)
1169 #endif
1170 && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))]
1171 && (CONSTANT_P (SUBREG_REG (out))
1172 || strict_low
1173 || (((REG_P (SUBREG_REG (out))
1174 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1175 || MEM_P (SUBREG_REG (out)))
1176 && ((GET_MODE_PRECISION (outmode)
1177 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1178 #if WORD_REGISTER_OPERATIONS
1179 || ((GET_MODE_PRECISION (outmode)
1180 < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out))))
1181 && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
1182 ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
1183 / UNITS_PER_WORD)))
1184 #endif
1185 ))
1186 || (REG_P (SUBREG_REG (out))
1187 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1188 /* The case of a word mode subreg
1189 is handled differently in the following statement. */
1190 && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
1191 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
1192 > UNITS_PER_WORD))
1193 && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode))
1194 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1195 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1196 SUBREG_REG (out))
1197 == NO_REGS))
1198 #ifdef CANNOT_CHANGE_MODE_CLASS
1199 || (REG_P (SUBREG_REG (out))
1200 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1201 && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1202 GET_MODE (SUBREG_REG (out)),
1203 outmode))
1204 #endif
1205 ))
1206 {
1207 #ifdef LIMIT_RELOAD_CLASS
1208 out_subreg_loc = outloc;
1209 #endif
1210 outloc = &SUBREG_REG (out);
1211 out = *outloc;
1212 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1213 || GET_MODE_SIZE (GET_MODE (out))
1214 <= GET_MODE_SIZE (outmode));
1215 outmode = GET_MODE (out);
1216 }
1217
1218 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1219 where either M1 is not valid for R or M2 is wider than a word but we
1220 only need one register to store an M2-sized quantity in R.
1221
1222 However, we must reload the inner reg *as well as* the subreg in
1223 that case and the inner reg is an in-out reload. */
1224
1225 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1226 {
1227 enum reg_class in_out_class
1228 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1229 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1230 GET_MODE (SUBREG_REG (out)),
1231 SUBREG_BYTE (out),
1232 GET_MODE (out)),
1233 REGNO (SUBREG_REG (out)));
1234
1235 /* This relies on the fact that emit_reload_insns outputs the
1236 instructions for output reloads of type RELOAD_OTHER in reverse
1237 order of the reloads. Thus if the outer reload is also of type
1238 RELOAD_OTHER, we are guaranteed that this inner reload will be
1239 output after the outer reload. */
1240 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1241 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1242 0, 0, opnum, RELOAD_OTHER);
1243 dont_remove_subreg = 1;
1244 }
1245
1246 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1247 if (in != 0 && out != 0 && MEM_P (out)
1248 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1249 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1250 dont_share = 1;
1251
1252 /* If IN is a SUBREG of a hard register, make a new REG. This
1253 simplifies some of the cases below. */
1254
1255 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1256 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1257 && ! dont_remove_subreg)
1258 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1259
1260 /* Similarly for OUT. */
1261 if (out != 0 && GET_CODE (out) == SUBREG
1262 && REG_P (SUBREG_REG (out))
1263 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1264 && ! dont_remove_subreg)
1265 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1266
1267 /* Narrow down the class of register wanted if that is
1268 desirable on this machine for efficiency. */
1269 {
1270 reg_class_t preferred_class = rclass;
1271
1272 if (in != 0)
1273 preferred_class = targetm.preferred_reload_class (in, rclass);
1274
1275 /* Output reloads may need analogous treatment, different in detail. */
1276 if (out != 0)
1277 preferred_class
1278 = targetm.preferred_output_reload_class (out, preferred_class);
1279
1280 /* Discard what the target said if we cannot do it. */
1281 if (preferred_class != NO_REGS
1282 || (optional && type == RELOAD_FOR_OUTPUT))
1283 rclass = (enum reg_class) preferred_class;
1284 }
1285
1286 /* Make sure we use a class that can handle the actual pseudo
1287 inside any subreg. For example, on the 386, QImode regs
1288 can appear within SImode subregs. Although GENERAL_REGS
1289 can handle SImode, QImode needs a smaller class. */
1290 #ifdef LIMIT_RELOAD_CLASS
1291 if (in_subreg_loc)
1292 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1293 else if (in != 0 && GET_CODE (in) == SUBREG)
1294 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1295
1296 if (out_subreg_loc)
1297 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1298 if (out != 0 && GET_CODE (out) == SUBREG)
1299 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1300 #endif
1301
1302 /* Verify that this class is at least possible for the mode that
1303 is specified. */
1304 if (this_insn_is_asm)
1305 {
1306 machine_mode mode;
1307 if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode))
1308 mode = inmode;
1309 else
1310 mode = outmode;
1311 if (mode == VOIDmode)
1312 {
1313 error_for_asm (this_insn, "cannot reload integer constant "
1314 "operand in %<asm%>");
1315 mode = word_mode;
1316 if (in != 0)
1317 inmode = word_mode;
1318 if (out != 0)
1319 outmode = word_mode;
1320 }
1321 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1322 if (HARD_REGNO_MODE_OK (i, mode)
1323 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1324 break;
1325 if (i == FIRST_PSEUDO_REGISTER)
1326 {
1327 error_for_asm (this_insn, "impossible register constraint "
1328 "in %<asm%>");
1329 /* Avoid further trouble with this insn. */
1330 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1331      /* We used to continue here setting class to ALL_REGS, but that triggers
1332      a sanity check on i386 for:
1333 void foo(long double d)
1334 {
1335 asm("" :: "a" (d));
1336 }
1337 Returning zero here ought to be safe as we take care in
1338      find_reloads to not process the reloads when the instruction was
1339 replaced by USE. */
1340
1341 return 0;
1342 }
1343 }
1344
1345 /* Optional output reloads are always OK even if we have no register class,
1346 since the function of these reloads is only to have spill_reg_store etc.
1347 set, so that the storing insn can be deleted later. */
1348 gcc_assert (rclass != NO_REGS
1349 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1350
1351 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1352
1353 if (i == n_reloads)
1354 {
1355 /* See if we need a secondary reload register to move between CLASS
1356 and IN or CLASS and OUT. Get the icode and push any required reloads
1357 needed for each of them if so. */
1358
1359 if (in != 0)
1360 secondary_in_reload
1361 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1362 &secondary_in_icode, NULL);
1363 if (out != 0 && GET_CODE (out) != SCRATCH)
1364 secondary_out_reload
1365 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1366 type, &secondary_out_icode, NULL);
1367
1368 /* We found no existing reload suitable for re-use.
1369 So add an additional reload. */
1370
1371 #ifdef SECONDARY_MEMORY_NEEDED
1372 if (subreg_in_class == NO_REGS
1373 && in != 0
1374 && (REG_P (in)
1375 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1376 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1377 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1378 /* If a memory location is needed for the copy, make one. */
1379 if (subreg_in_class != NO_REGS
1380 && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode))
1381 get_secondary_mem (in, inmode, opnum, type);
1382 #endif
1383
1384 i = n_reloads;
1385 rld[i].in = in;
1386 rld[i].out = out;
1387 rld[i].rclass = rclass;
1388 rld[i].inmode = inmode;
1389 rld[i].outmode = outmode;
1390 rld[i].reg_rtx = 0;
1391 rld[i].optional = optional;
1392 rld[i].inc = 0;
1393 rld[i].nocombine = 0;
1394 rld[i].in_reg = inloc ? *inloc : 0;
1395 rld[i].out_reg = outloc ? *outloc : 0;
1396 rld[i].opnum = opnum;
1397 rld[i].when_needed = type;
1398 rld[i].secondary_in_reload = secondary_in_reload;
1399 rld[i].secondary_out_reload = secondary_out_reload;
1400 rld[i].secondary_in_icode = secondary_in_icode;
1401 rld[i].secondary_out_icode = secondary_out_icode;
1402 rld[i].secondary_p = 0;
1403
1404 n_reloads++;
1405
1406 #ifdef SECONDARY_MEMORY_NEEDED
1407 if (out != 0
1408 && (REG_P (out)
1409 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1410 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1411 && SECONDARY_MEMORY_NEEDED (rclass,
1412 REGNO_REG_CLASS (reg_or_subregno (out)),
1413 outmode))
1414 get_secondary_mem (out, outmode, opnum, type);
1415 #endif
1416 }
1417 else
1418 {
1419 /* We are reusing an existing reload,
1420 but we may have additional information for it.
1421 For example, we may now have both IN and OUT
1422 while the old one may have just one of them. */
1423
1424 /* The modes can be different. If they are, we want to reload in
1425 the larger mode, so that the value is valid for both modes. */
1426 if (inmode != VOIDmode
1427 && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode))
1428 rld[i].inmode = inmode;
1429 if (outmode != VOIDmode
1430 && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode))
1431 rld[i].outmode = outmode;
1432 if (in != 0)
1433 {
1434 rtx in_reg = inloc ? *inloc : 0;
1435 /* If we merge reloads for two distinct rtl expressions that
1436 are identical in content, there might be duplicate address
1437 reloads. Remove the extra set now, so that if we later find
1438 that we can inherit this reload, we can get rid of the
1439 address reloads altogether.
1440
1441 Do not do this if both reloads are optional since the result
1442 would be an optional reload which could potentially leave
1443 unresolved address replacements.
1444
1445 It is not sufficient to call transfer_replacements since
1446 choose_reload_regs will remove the replacements for address
1447 reloads of inherited reloads which results in the same
1448 problem. */
1449 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1450 && ! (rld[i].optional && optional))
1451 {
1452 /* We must keep the address reload with the lower operand
1453 number alive. */
1454 if (opnum > rld[i].opnum)
1455 {
1456 remove_address_replacements (in);
1457 in = rld[i].in;
1458 in_reg = rld[i].in_reg;
1459 }
1460 else
1461 remove_address_replacements (rld[i].in);
1462 }
1463      /* When emitting reloads we don't look only at the in-
1464 and outmode, but also directly at the operands (in and out).
1465 So we can't simply overwrite them with whatever we have found
1466 for this (to-be-merged) reload, we have to "merge" that too.
1467 Reusing another reload already verified that we deal with the
1468 same operands, just possibly in different modes. So we
1469 overwrite the operands only when the new mode is larger.
1470 See also PR33613. */
1471 if (!rld[i].in
1472 || GET_MODE_SIZE (GET_MODE (in))
1473 > GET_MODE_SIZE (GET_MODE (rld[i].in)))
1474 rld[i].in = in;
1475 if (!rld[i].in_reg
1476 || (in_reg
1477 && GET_MODE_SIZE (GET_MODE (in_reg))
1478 > GET_MODE_SIZE (GET_MODE (rld[i].in_reg))))
1479 rld[i].in_reg = in_reg;
1480 }
1481 if (out != 0)
1482 {
1483 if (!rld[i].out
1484 || (out
1485 && GET_MODE_SIZE (GET_MODE (out))
1486 > GET_MODE_SIZE (GET_MODE (rld[i].out))))
1487 rld[i].out = out;
1488 if (outloc
1489 && (!rld[i].out_reg
1490 || GET_MODE_SIZE (GET_MODE (*outloc))
1491 > GET_MODE_SIZE (GET_MODE (rld[i].out_reg))))
1492 rld[i].out_reg = *outloc;
1493 }
1494 if (reg_class_subset_p (rclass, rld[i].rclass))
1495 rld[i].rclass = rclass;
1496 rld[i].optional &= optional;
1497 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1498 opnum, rld[i].opnum))
1499 rld[i].when_needed = RELOAD_OTHER;
1500 rld[i].opnum = MIN (rld[i].opnum, opnum);
1501 }
1502
1503 /* If the ostensible rtx being reloaded differs from the rtx found
1504 in the location to substitute, this reload is not safe to combine
1505 because we cannot reliably tell whether it appears in the insn. */
1506
1507 if (in != 0 && in != *inloc)
1508 rld[i].nocombine = 1;
1509
1510 #if 0
1511 /* This was replaced by changes in find_reloads_address_1 and the new
1512 function inc_for_reload, which go with a new meaning of reload_inc. */
1513
1514 /* If this is an IN/OUT reload in an insn that sets the CC,
1515 it must be for an autoincrement. It doesn't work to store
1516 the incremented value after the insn because that would clobber the CC.
1517 So we must do the increment of the value reloaded from,
1518 increment it, store it back, then decrement again. */
1519 if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
1520 {
1521 out = 0;
1522 rld[i].out = 0;
1523 rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
1524 /* If we did not find a nonzero amount-to-increment-by,
1525 that contradicts the belief that IN is being incremented
1526 in an address in this insn. */
1527 gcc_assert (rld[i].inc != 0);
1528 }
1529 #endif
1530
1531 /* If we will replace IN and OUT with the reload-reg,
1532 record where they are located so that substitution need
1533 not do a tree walk. */
1534
1535 if (replace_reloads)
1536 {
1537 if (inloc != 0)
1538 {
1539 struct replacement *r = &replacements[n_replacements++];
1540 r->what = i;
1541 r->where = inloc;
1542 r->mode = inmode;
1543 }
1544 if (outloc != 0 && outloc != inloc)
1545 {
1546 struct replacement *r = &replacements[n_replacements++];
1547 r->what = i;
1548 r->where = outloc;
1549 r->mode = outmode;
1550 }
1551 }
1552
1553 /* If this reload is just being introduced and it has both
1554 an incoming quantity and an outgoing quantity that are
1555 supposed to be made to match, see if either one of the two
1556 can serve as the place to reload into.
1557
1558 If one of them is acceptable, set rld[i].reg_rtx
1559 to that one. */
1560
1561 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1562 {
1563 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1564 inmode, outmode,
1565 rld[i].rclass, i,
1566 earlyclobber_operand_p (out));
1567
1568 /* If the outgoing register already contains the same value
1569 as the incoming one, we can dispense with loading it.
1570 The easiest way to tell the caller that is to give a phony
1571 value for the incoming operand (same as outgoing one). */
1572 if (rld[i].reg_rtx == out
1573 && (REG_P (in) || CONSTANT_P (in))
1574 && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1575 static_reload_reg_p, i, inmode))
1576 rld[i].in = out;
1577 }
1578
1579 /* If this is an input reload and the operand contains a register that
1580 dies in this insn and is used nowhere else, see if it is the right class
1581 to be used for this reload. Use it if so. (This occurs most commonly
1582 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1583 this if it is also an output reload that mentions the register unless
1584 the output is a SUBREG that clobbers an entire register.
1585
1586 Note that the operand might be one of the spill regs, if it is a
1587 pseudo reg and we are in a block where spilling has not taken place.
1588 But if there is no spilling in this block, that is OK.
1589 An explicitly used hard reg cannot be a spill reg. */
1590
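/* Added summary (not in the original source), roughly restating the test
   below: scan the REG_DEAD notes for a hard register that is mentioned in
   IN, is valid in both INMODE and OUTMODE, belongs to RCLASS, is not a
   fixed register, and is not set or otherwise referenced elsewhere in the
   insn outside the reloaded operand; such a register can safely serve as
   the reload register.  */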
1591 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1592 {
1593 rtx note;
1594 int regno;
1595 machine_mode rel_mode = inmode;
1596
1597 if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode))
1598 rel_mode = outmode;
1599
1600 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1601 if (REG_NOTE_KIND (note) == REG_DEAD
1602 && REG_P (XEXP (note, 0))
1603 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1604 && reg_mentioned_p (XEXP (note, 0), in)
1605 /* Check that a former pseudo is valid; see find_dummy_reload. */
1606 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1607 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1608 ORIGINAL_REGNO (XEXP (note, 0)))
1609 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
1610 && ! refers_to_regno_for_reload_p (regno,
1611 end_hard_regno (rel_mode,
1612 regno),
1613 PATTERN (this_insn), inloc)
1614 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1615 /* If this is also an output reload, IN cannot be used as
1616 the reload register if it is set in this insn unless IN
1617 is also OUT. */
1618 && (out == 0 || in == out
1619 || ! hard_reg_set_here_p (regno,
1620 end_hard_regno (rel_mode, regno),
1621 PATTERN (this_insn)))
1622 /* ??? Why is this code so different from the previous?
1623 Is there any simple coherent way to describe the two together?
1624 What's going on here? */
1625 && (in != out
1626 || (GET_CODE (in) == SUBREG
1627 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1628 / UNITS_PER_WORD)
1629 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1630 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
1631 /* Make sure the operand fits in the reg that dies. */
1632 && (GET_MODE_SIZE (rel_mode)
1633 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1634 && HARD_REGNO_MODE_OK (regno, inmode)
1635 && HARD_REGNO_MODE_OK (regno, outmode))
1636 {
1637 unsigned int offs;
1638 unsigned int nregs = MAX (hard_regno_nregs[regno][inmode],
1639 hard_regno_nregs[regno][outmode]);
1640
1641 for (offs = 0; offs < nregs; offs++)
1642 if (fixed_regs[regno + offs]
1643 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1644 regno + offs))
1645 break;
1646
1647 if (offs == nregs
1648 && (! (refers_to_regno_for_reload_p
1649 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1650 || can_reload_into (in, regno, inmode)))
1651 {
1652 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1653 break;
1654 }
1655 }
1656 }
1657
1658 if (out)
1659 output_reloadnum = i;
1660
1661 return i;
1662 }
1663
1664 /* Record an additional place we must replace a value
1665 for which we have already recorded a reload.
1666 RELOADNUM is the value returned by push_reload
1667 when the reload was recorded.
1668 This is used in insn patterns that use match_dup. */
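/* Illustrative note (added, not in the original source): when a pattern
   contains, say, (match_dup 0), find_reloads records the dup's location
   here with the reload number already chosen for operand 0, so that
   subst_reloads later patches both locations with the same reload
   register.  */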
1669
1670 static void
1671 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1672 {
1673 if (replace_reloads)
1674 {
1675 struct replacement *r = &replacements[n_replacements++];
1676 r->what = reloadnum;
1677 r->where = loc;
1678 r->mode = mode;
1679 }
1680 }
1681
1682 /* Duplicate any replacement we have recorded to apply at
1683 location ORIG_LOC to also be performed at DUP_LOC.
1684 This is used in insn patterns that use match_dup. */
1685
1686 static void
1687 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1688 {
1689 int i, n = n_replacements;
1690
1691 for (i = 0; i < n; i++)
1692 {
1693 struct replacement *r = &replacements[i];
1694 if (r->where == orig_loc)
1695 push_replacement (dup_loc, r->what, r->mode);
1696 }
1697 }
1698 \f
1699 /* Transfer all replacements that used to be in reload FROM to be in
1700 reload TO. */
1701
1702 void
1703 transfer_replacements (int to, int from)
1704 {
1705 int i;
1706
1707 for (i = 0; i < n_replacements; i++)
1708 if (replacements[i].what == from)
1709 replacements[i].what = to;
1710 }
1711 \f
1712 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1713 or a subpart of it. If we have any replacements registered for IN_RTX,
1714 cancel the reloads that were supposed to load them.
1715 Return nonzero if we canceled any reloads. */
1716 int
1717 remove_address_replacements (rtx in_rtx)
1718 {
1719 int i, j;
1720 char reload_flags[MAX_RELOADS];
1721 int something_changed = 0;
1722
1723 memset (reload_flags, 0, sizeof reload_flags);
1724 for (i = 0, j = 0; i < n_replacements; i++)
1725 {
1726 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1727 reload_flags[replacements[i].what] |= 1;
1728 else
1729 {
1730 replacements[j++] = replacements[i];
1731 reload_flags[replacements[i].what] |= 2;
1732 }
1733 }
1734 /* Note that the following store must be done before the recursive calls. */
1735 n_replacements = j;
1736
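  /* Added explanatory note: reload_flags[r] is ORed with 1 when a
     replacement belonging to reload R lies inside IN_RTX and with 2 when
     one survives outside it, so a reload is cancelled below only when its
     flags equal exactly 1, i.e. it had replacements and every one of them
     was inside IN_RTX.  */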
1737 for (i = n_reloads - 1; i >= 0; i--)
1738 {
1739 if (reload_flags[i] == 1)
1740 {
1741 deallocate_reload_reg (i);
1742 remove_address_replacements (rld[i].in);
1743 rld[i].in = 0;
1744 something_changed = 1;
1745 }
1746 }
1747 return something_changed;
1748 }
1749 \f
1750 /* If there is only one output reload, and it is not for an earlyclobber
1751 operand, try to combine it with a (logically unrelated) input reload
1752 to reduce the number of reload registers needed.
1753
1754 This is safe if the input reload does not appear in
1755 the value being output-reloaded, because this implies
1756 it is not needed any more once the original insn completes.
1757
1758 If that doesn't work, see if we can use any of the registers that
1759 die in this insn as a reload register. We can if it is of the right
1760 class and does not appear in the value being output-reloaded. */
1761
1762 static void
1763 combine_reloads (void)
1764 {
1765 int i, regno;
1766 int output_reload = -1;
1767 int secondary_out = -1;
1768 rtx note;
1769
1770 /* Find the output reload; return unless there is exactly one
1771 and that one is mandatory. */
1772
1773 for (i = 0; i < n_reloads; i++)
1774 if (rld[i].out != 0)
1775 {
1776 if (output_reload >= 0)
1777 return;
1778 output_reload = i;
1779 }
1780
1781 if (output_reload < 0 || rld[output_reload].optional)
1782 return;
1783
1784 /* An input-output reload isn't combinable. */
1785
1786 if (rld[output_reload].in != 0)
1787 return;
1788
1789 /* If this reload is for an earlyclobber operand, we can't do anything. */
1790 if (earlyclobber_operand_p (rld[output_reload].out))
1791 return;
1792
1793 /* If there is a reload for part of the address of this operand, we would
1794 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1795 its life to the point where doing this combine would not lower the
1796 number of spill registers needed. */
1797 for (i = 0; i < n_reloads; i++)
1798 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1799 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1800 && rld[i].opnum == rld[output_reload].opnum)
1801 return;
1802
1803 /* Check each input reload; can we combine it? */
1804
1805 for (i = 0; i < n_reloads; i++)
1806 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1807 /* Life span of this reload must not extend past main insn. */
1808 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1809 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1810 && rld[i].when_needed != RELOAD_OTHER
1811 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1812 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1813 [(int) rld[output_reload].outmode])
1814 && rld[i].inc == 0
1815 && rld[i].reg_rtx == 0
1816 #ifdef SECONDARY_MEMORY_NEEDED
1817 /* Don't combine two reloads with different secondary
1818 memory locations. */
1819 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1820 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1821 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1822 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1823 #endif
1824 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1825 ? (rld[i].rclass == rld[output_reload].rclass)
1826 : (reg_class_subset_p (rld[i].rclass,
1827 rld[output_reload].rclass)
1828 || reg_class_subset_p (rld[output_reload].rclass,
1829 rld[i].rclass)))
1830 && (MATCHES (rld[i].in, rld[output_reload].out)
1831 /* Args reversed because the first arg seems to be
1832 the one that we imagine being modified
1833 while the second is the one that might be affected. */
1834 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1835 rld[i].in)
1836 /* However, if the input is a register that appears inside
1837 the output, then we also can't share.
1838 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1839 If the same reload reg is used for both reg 69 and the
1840 result to be stored in memory, then that result
1841 will clobber the address of the memory ref. */
1842 && ! (REG_P (rld[i].in)
1843 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1844 rld[output_reload].out))))
1845 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1846 rld[i].when_needed != RELOAD_FOR_INPUT)
1847 && (reg_class_size[(int) rld[i].rclass]
1848 || targetm.small_register_classes_for_mode_p (VOIDmode))
1849 /* We will allow making things slightly worse by combining an
1850 input and an output, but no worse than that. */
1851 && (rld[i].when_needed == RELOAD_FOR_INPUT
1852 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1853 {
1854 int j;
1855
1856 /* We have found a reload to combine with! */
1857 rld[i].out = rld[output_reload].out;
1858 rld[i].out_reg = rld[output_reload].out_reg;
1859 rld[i].outmode = rld[output_reload].outmode;
1860 /* Mark the old output reload as inoperative. */
1861 rld[output_reload].out = 0;
1862 /* The combined reload is needed for the entire insn. */
1863 rld[i].when_needed = RELOAD_OTHER;
1864 /* If the output reload had a secondary reload, copy it. */
1865 if (rld[output_reload].secondary_out_reload != -1)
1866 {
1867 rld[i].secondary_out_reload
1868 = rld[output_reload].secondary_out_reload;
1869 rld[i].secondary_out_icode
1870 = rld[output_reload].secondary_out_icode;
1871 }
1872
1873 #ifdef SECONDARY_MEMORY_NEEDED
1874 /* Copy any secondary MEM. */
1875 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1876 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1877 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1878 #endif
1879 /* If required, minimize the register class. */
1880 if (reg_class_subset_p (rld[output_reload].rclass,
1881 rld[i].rclass))
1882 rld[i].rclass = rld[output_reload].rclass;
1883
1884 /* Transfer all replacements from the old reload to the combined. */
1885 for (j = 0; j < n_replacements; j++)
1886 if (replacements[j].what == output_reload)
1887 replacements[j].what = i;
1888
1889 return;
1890 }
1891
1892 /* If this insn has only one operand that is modified or written (assumed
1893 to be the first), it must be the one corresponding to this reload. It
1894 is safe to use anything that dies in this insn for that output provided
1895 that it does not occur in the output (we already know it isn't an
1896 earlyclobber). If this is an asm insn, give up. */
1897
1898 if (INSN_CODE (this_insn) == -1)
1899 return;
1900
1901 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1902 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1903 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1904 return;
1905
1906 /* See if some hard register that dies in this insn and is not used in
1907 the output is of the right class. Only works if the register we pick
1908 up can fully hold our output reload. */
1909 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1910 if (REG_NOTE_KIND (note) == REG_DEAD
1911 && REG_P (XEXP (note, 0))
1912 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1913 rld[output_reload].out)
1914 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1915 && HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
1916 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1917 regno)
1918 && (hard_regno_nregs[regno][rld[output_reload].outmode]
1919 <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
1920 /* Ensure that a secondary or tertiary reload for this output
1921 won't want this register. */
1922 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1923 || (!(TEST_HARD_REG_BIT
1924 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1925 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1926 || !(TEST_HARD_REG_BIT
1927 (reg_class_contents[(int) rld[secondary_out].rclass],
1928 regno)))))
1929 && !fixed_regs[regno]
1930 /* Check that a former pseudo is valid; see find_dummy_reload. */
1931 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1932 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1933 ORIGINAL_REGNO (XEXP (note, 0)))
1934 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
1935 {
1936 rld[output_reload].reg_rtx
1937 = gen_rtx_REG (rld[output_reload].outmode, regno);
1938 return;
1939 }
1940 }
1941 \f
1942 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1943 See if one of IN and OUT is a register that may be used;
1944 this is desirable since a spill-register won't be needed.
1945 If so, return the register rtx that proves acceptable.
1946
1947 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1948 RCLASS is the register class required for the reload.
1949
1950 If FOR_REAL is >= 0, it is the number of the reload,
1951 and in some cases when it can be discovered that OUT doesn't need
1952 to be computed, clear out rld[FOR_REAL].out.
1953
1954 If FOR_REAL is -1, this should not be done, because this call
1955 is just to see if a register can be found, not to find and install it.
1956
1957 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1958 puts an additional constraint on being able to use IN for OUT since
1959 IN must not appear elsewhere in the insn (it is assumed that IN itself
1960 is safe from the earlyclobber). */
1961
1962 static rtx
1963 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1964 machine_mode inmode, machine_mode outmode,
1965 reg_class_t rclass, int for_real, int earlyclobber)
1966 {
1967 rtx in = real_in;
1968 rtx out = real_out;
1969 int in_offset = 0;
1970 int out_offset = 0;
1971 rtx value = 0;
1972
1973 /* If operands exceed a word, we can't use either of them
1974 unless they have the same size. */
1975 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1976 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1977 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1978 return 0;
1979
1980 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1981 respectively refers to a hard register. */
1982
1983 /* Find the inside of any subregs. */
1984 while (GET_CODE (out) == SUBREG)
1985 {
1986 if (REG_P (SUBREG_REG (out))
1987 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1988 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1989 GET_MODE (SUBREG_REG (out)),
1990 SUBREG_BYTE (out),
1991 GET_MODE (out));
1992 out = SUBREG_REG (out);
1993 }
1994 while (GET_CODE (in) == SUBREG)
1995 {
1996 if (REG_P (SUBREG_REG (in))
1997 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1998 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1999 GET_MODE (SUBREG_REG (in)),
2000 SUBREG_BYTE (in),
2001 GET_MODE (in));
2002 in = SUBREG_REG (in);
2003 }
2004
2005 /* Narrow down the reg class, the same way push_reload will;
2006 otherwise we might find a dummy now, but push_reload won't. */
2007 {
2008 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
2009 if (preferred_class != NO_REGS)
2010 rclass = (enum reg_class) preferred_class;
2011 }
2012
2013 /* See if OUT will do. */
2014 if (REG_P (out)
2015 && REGNO (out) < FIRST_PSEUDO_REGISTER)
2016 {
2017 unsigned int regno = REGNO (out) + out_offset;
2018 unsigned int nwords = hard_regno_nregs[regno][outmode];
2019 rtx saved_rtx;
2020
2021 /* When we consider whether the insn uses OUT,
2022 ignore references within IN. They don't prevent us
2023 from copying IN into OUT, because those refs would
2024 move into the insn that reloads IN.
2025
2026 However, we only ignore IN in its role as this reload.
2027 If the insn uses IN elsewhere and it contains OUT,
2028 that counts. We can't be sure it's the "same" operand
2029 so it might not go through this reload.
2030
2031 We also need to avoid using OUT if it, or part of it, is a
2032 fixed register. Modifying such registers, even transiently,
2033 may have undefined effects on the machine, such as modifying
2034 the stack pointer. */
2035 saved_rtx = *inloc;
2036 *inloc = const0_rtx;
2037
2038 if (regno < FIRST_PSEUDO_REGISTER
2039 && HARD_REGNO_MODE_OK (regno, outmode)
2040 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2041 PATTERN (this_insn), outloc))
2042 {
2043 unsigned int i;
2044
2045 for (i = 0; i < nwords; i++)
2046 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2047 regno + i)
2048 || fixed_regs[regno + i])
2049 break;
2050
2051 if (i == nwords)
2052 {
2053 if (REG_P (real_out))
2054 value = real_out;
2055 else
2056 value = gen_rtx_REG (outmode, regno);
2057 }
2058 }
2059
2060 *inloc = saved_rtx;
2061 }
2062
2063 /* Consider using IN if OUT was not acceptable
2064 or if OUT dies in this insn (like the quotient in a divmod insn).
2065 We can't use IN unless it dies in this insn,
2066 which means we must know accurately which hard regs are live.
2067 Also, the result can't go in IN if IN is used within OUT,
2068 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2069 if (hard_regs_live_known
2070 && REG_P (in)
2071 && REGNO (in) < FIRST_PSEUDO_REGISTER
2072 && (value == 0
2073 || find_reg_note (this_insn, REG_UNUSED, real_out))
2074 && find_reg_note (this_insn, REG_DEAD, real_in)
2075 && !fixed_regs[REGNO (in)]
2076 && HARD_REGNO_MODE_OK (REGNO (in),
2077 /* The only case where out and real_out might
2078 have different modes is where real_out
2079 is a subreg, and in that case, out
2080 has a real mode. */
2081 (GET_MODE (out) != VOIDmode
2082 ? GET_MODE (out) : outmode))
2083 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2084 /* However only do this if we can be sure that this input
2085 operand doesn't correspond with an uninitialized pseudo.
2086 global can assign some hardreg to it that is the same as
2087 the one assigned to a different, also live pseudo (as it
2088 can ignore the conflict). We must never introduce writes
2089 to such hardregs, as they would clobber the other live
2090 pseudo. See PR 20973. */
2091 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2092 ORIGINAL_REGNO (in))
2093 /* Similarly, only do this if we can be sure that the death
2094 note is still valid. global can assign some hardreg to
2095 the pseudo referenced in the note and simultaneously a
2096 subword of this hardreg to a different, also live pseudo,
2097 because only another subword of the hardreg is actually
2098 used in the insn. This cannot happen if the pseudo has
2099 been assigned exactly one hardreg. See PR 33732. */
2100 && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
2101 {
2102 unsigned int regno = REGNO (in) + in_offset;
2103 unsigned int nwords = hard_regno_nregs[regno][inmode];
2104
2105 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2106 && ! hard_reg_set_here_p (regno, regno + nwords,
2107 PATTERN (this_insn))
2108 && (! earlyclobber
2109 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2110 PATTERN (this_insn), inloc)))
2111 {
2112 unsigned int i;
2113
2114 for (i = 0; i < nwords; i++)
2115 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2116 regno + i))
2117 break;
2118
2119 if (i == nwords)
2120 {
2121 /* If we were going to use OUT as the reload reg
2122 and changed our mind, it means OUT is a dummy that
2123 dies here. So don't bother copying value to it. */
2124 if (for_real >= 0 && value == real_out)
2125 rld[for_real].out = 0;
2126 if (REG_P (real_in))
2127 value = real_in;
2128 else
2129 value = gen_rtx_REG (inmode, regno);
2130 }
2131 }
2132 }
2133
2134 return value;
2135 }
2136 \f
2137 /* This page contains subroutines used mainly for determining
2138 whether the IN or an OUT of a reload can serve as the
2139 reload register. */
2140
2141 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2142
2143 int
2144 earlyclobber_operand_p (rtx x)
2145 {
2146 int i;
2147
2148 for (i = 0; i < n_earlyclobbers; i++)
2149 if (reload_earlyclobbers[i] == x)
2150 return 1;
2151
2152 return 0;
2153 }
2154
2155 /* Return 1 if expression X alters a hard reg in the range
2156 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2157 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2158 X should be the body of an instruction. */
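/* Illustrative example (added, not in the original source; assumes SImode
   occupies a single hard register): with
     X = (parallel [(set (reg:SI 2) (const_int 0))
                    (clobber (reg:SI 5))])
   hard_reg_set_here_p (5, 6, X) returns 1 through the PARALLEL case,
   while hard_reg_set_here_p (3, 5, X) returns 0 because neither
   destination overlaps hard regs 3..4.  */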
2159
2160 static int
2161 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2162 {
2163 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2164 {
2165 rtx op0 = SET_DEST (x);
2166
2167 while (GET_CODE (op0) == SUBREG)
2168 op0 = SUBREG_REG (op0);
2169 if (REG_P (op0))
2170 {
2171 unsigned int r = REGNO (op0);
2172
2173 /* See if this reg overlaps range under consideration. */
2174 if (r < end_regno
2175 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2176 return 1;
2177 }
2178 }
2179 else if (GET_CODE (x) == PARALLEL)
2180 {
2181 int i = XVECLEN (x, 0) - 1;
2182
2183 for (; i >= 0; i--)
2184 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2185 return 1;
2186 }
2187
2188 return 0;
2189 }
2190
2191 /* Return 1 if ADDR is a valid memory address for mode MODE
2192 in address space AS, and check that each pseudo reg has the
2193 proper kind of hard reg. */
2194
2195 int
2196 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2197 rtx addr, addr_space_t as)
2198 {
2199 #ifdef GO_IF_LEGITIMATE_ADDRESS
2200 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2201 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2202 return 0;
2203
2204 win:
2205 return 1;
2206 #else
2207 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2208 #endif
2209 }
2210 \f
2211 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2212 if they are the same hard reg, and has special hacks for
2213 autoincrement and autodecrement.
2214 This is specifically intended for find_reloads to use
2215 in determining whether two operands match.
2216 X is the operand whose number is the lower of the two.
2217
2218 The value is 2 if Y contains a pre-increment that matches
2219 a non-incrementing address in X. */
2220
2221 /* ??? To be completely correct, we should arrange to pass
2222 for X the output operand and for Y the input operand.
2223 For now, we assume that the output operand has the lower number
2224 because that is natural in (SET output (... input ...)). */
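/* Worked example (added, not in the original source; assumes a target
   where SImode is the pointer mode):
     operands_match_p ((mem:SI (reg:SI 2)),
                       (mem:SI (pre_inc:SI (reg:SI 2))))
   returns 2: the two addresses name the same register, and the PRE_INC
   sits in Y, the higher-numbered operand, which is exactly the situation
   the special return value 2 reports to callers.  */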
2225
2226 int
2227 operands_match_p (rtx x, rtx y)
2228 {
2229 int i;
2230 RTX_CODE code = GET_CODE (x);
2231 const char *fmt;
2232 int success_2;
2233
2234 if (x == y)
2235 return 1;
2236 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2237 && (REG_P (y) || (GET_CODE (y) == SUBREG
2238 && REG_P (SUBREG_REG (y)))))
2239 {
2240 int j;
2241
2242 if (code == SUBREG)
2243 {
2244 i = REGNO (SUBREG_REG (x));
2245 if (i >= FIRST_PSEUDO_REGISTER)
2246 goto slow;
2247 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2248 GET_MODE (SUBREG_REG (x)),
2249 SUBREG_BYTE (x),
2250 GET_MODE (x));
2251 }
2252 else
2253 i = REGNO (x);
2254
2255 if (GET_CODE (y) == SUBREG)
2256 {
2257 j = REGNO (SUBREG_REG (y));
2258 if (j >= FIRST_PSEUDO_REGISTER)
2259 goto slow;
2260 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2261 GET_MODE (SUBREG_REG (y)),
2262 SUBREG_BYTE (y),
2263 GET_MODE (y));
2264 }
2265 else
2266 j = REGNO (y);
2267
2268 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2269 multiple hard register group of scalar integer registers, so that
2270 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2271 register. */
2272 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
2273 && SCALAR_INT_MODE_P (GET_MODE (x))
2274 && i < FIRST_PSEUDO_REGISTER)
2275 i += hard_regno_nregs[i][GET_MODE (x)] - 1;
2276 if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
2277 && SCALAR_INT_MODE_P (GET_MODE (y))
2278 && j < FIRST_PSEUDO_REGISTER)
2279 j += hard_regno_nregs[j][GET_MODE (y)] - 1;
2280
2281 return i == j;
2282 }
2283 /* If two operands must match, because they are really a single
2284 operand of an assembler insn, then two postincrements are invalid
2285 because the assembler insn would increment only once.
2286 On the other hand, a postincrement matches ordinary indexing
2287 if the postincrement is the output operand. */
2288 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2289 return operands_match_p (XEXP (x, 0), y);
2290 /* Two preincrements are invalid
2291 because the assembler insn would increment only once.
2292 On the other hand, a preincrement matches ordinary indexing
2293 if the preincrement is the input operand.
2294 In this case, return 2, since some callers need to do special
2295 things when this happens. */
2296 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2297 || GET_CODE (y) == PRE_MODIFY)
2298 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2299
2300 slow:
2301
2302 /* Now we have disposed of all the cases in which different rtx codes
2303 can match. */
2304 if (code != GET_CODE (y))
2305 return 0;
2306
2307 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2308 if (GET_MODE (x) != GET_MODE (y))
2309 return 0;
2310
2311 /* MEMs referring to different address space are not equivalent. */
2312 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2313 return 0;
2314
2315 switch (code)
2316 {
2317 CASE_CONST_UNIQUE:
2318 return 0;
2319
2320 case LABEL_REF:
2321 return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
2322 case SYMBOL_REF:
2323 return XSTR (x, 0) == XSTR (y, 0);
2324
2325 default:
2326 break;
2327 }
2328
2329 /* Compare the elements. If any pair of corresponding elements
2330 fails to match, return 0 for the whole thing. */
2331
2332 success_2 = 0;
2333 fmt = GET_RTX_FORMAT (code);
2334 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2335 {
2336 int val, j;
2337 switch (fmt[i])
2338 {
2339 case 'w':
2340 if (XWINT (x, i) != XWINT (y, i))
2341 return 0;
2342 break;
2343
2344 case 'i':
2345 if (XINT (x, i) != XINT (y, i))
2346 return 0;
2347 break;
2348
2349 case 'e':
2350 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2351 if (val == 0)
2352 return 0;
2353 /* If any subexpression returns 2,
2354 we should return 2 if we are successful. */
2355 if (val == 2)
2356 success_2 = 1;
2357 break;
2358
2359 case '0':
2360 break;
2361
2362 case 'E':
2363 if (XVECLEN (x, i) != XVECLEN (y, i))
2364 return 0;
2365 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2366 {
2367 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2368 if (val == 0)
2369 return 0;
2370 if (val == 2)
2371 success_2 = 1;
2372 }
2373 break;
2374
2375 /* It is believed that rtx's at this level will never
2376 contain anything but integers and other rtx's,
2377 except for within LABEL_REFs and SYMBOL_REFs. */
2378 default:
2379 gcc_unreachable ();
2380 }
2381 }
2382 return 1 + success_2;
2383 }
2384 \f
2385 /* Describe the range of registers or memory referenced by X.
2386 If X is a register, set REG_FLAG and put the first register
2387 number into START and the last plus one into END.
2388 If X is a memory reference, put a base address into BASE
2389 and a range of integer offsets into START and END.
2390 If X is pushing on the stack, we can assume it causes no trouble,
2391 so we set the SAFE field. */
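/* Worked example (added, not in the original source; assumes 4-byte
   SImode, with FP and SP standing for the frame and stack pointer regs):
     decompose ((mem:SI (plus:SI FP (const_int 8))))
   yields base = FP, start = 8, end = 12 (start plus the mode size),
   while a push such as (mem:SI (pre_dec:SI SP)) comes back with the
   SAFE field set because its base is the stack pointer.  */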
2392
2393 static struct decomposition
2394 decompose (rtx x)
2395 {
2396 struct decomposition val;
2397 int all_const = 0;
2398
2399 memset (&val, 0, sizeof (val));
2400
2401 switch (GET_CODE (x))
2402 {
2403 case MEM:
2404 {
2405 rtx base = NULL_RTX, offset = 0;
2406 rtx addr = XEXP (x, 0);
2407
2408 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2409 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2410 {
2411 val.base = XEXP (addr, 0);
2412 val.start = -GET_MODE_SIZE (GET_MODE (x));
2413 val.end = GET_MODE_SIZE (GET_MODE (x));
2414 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2415 return val;
2416 }
2417
2418 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2419 {
2420 if (GET_CODE (XEXP (addr, 1)) == PLUS
2421 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2422 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2423 {
2424 val.base = XEXP (addr, 0);
2425 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2426 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2427 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2428 return val;
2429 }
2430 }
2431
2432 if (GET_CODE (addr) == CONST)
2433 {
2434 addr = XEXP (addr, 0);
2435 all_const = 1;
2436 }
2437 if (GET_CODE (addr) == PLUS)
2438 {
2439 if (CONSTANT_P (XEXP (addr, 0)))
2440 {
2441 base = XEXP (addr, 1);
2442 offset = XEXP (addr, 0);
2443 }
2444 else if (CONSTANT_P (XEXP (addr, 1)))
2445 {
2446 base = XEXP (addr, 0);
2447 offset = XEXP (addr, 1);
2448 }
2449 }
2450
2451 if (offset == 0)
2452 {
2453 base = addr;
2454 offset = const0_rtx;
2455 }
2456 if (GET_CODE (offset) == CONST)
2457 offset = XEXP (offset, 0);
2458 if (GET_CODE (offset) == PLUS)
2459 {
2460 if (CONST_INT_P (XEXP (offset, 0)))
2461 {
2462 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2463 offset = XEXP (offset, 0);
2464 }
2465 else if (CONST_INT_P (XEXP (offset, 1)))
2466 {
2467 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2468 offset = XEXP (offset, 1);
2469 }
2470 else
2471 {
2472 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2473 offset = const0_rtx;
2474 }
2475 }
2476 else if (!CONST_INT_P (offset))
2477 {
2478 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2479 offset = const0_rtx;
2480 }
2481
2482 if (all_const && GET_CODE (base) == PLUS)
2483 base = gen_rtx_CONST (GET_MODE (base), base);
2484
2485 gcc_assert (CONST_INT_P (offset));
2486
2487 val.start = INTVAL (offset);
2488 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2489 val.base = base;
2490 }
2491 break;
2492
2493 case REG:
2494 val.reg_flag = 1;
2495 val.start = true_regnum (x);
2496 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2497 {
2498 /* A pseudo with no hard reg. */
2499 val.start = REGNO (x);
2500 val.end = val.start + 1;
2501 }
2502 else
2503 /* A hard reg. */
2504 val.end = end_hard_regno (GET_MODE (x), val.start);
2505 break;
2506
2507 case SUBREG:
2508 if (!REG_P (SUBREG_REG (x)))
2509 /* This could be more precise, but it's good enough. */
2510 return decompose (SUBREG_REG (x));
2511 val.reg_flag = 1;
2512 val.start = true_regnum (x);
2513 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2514 return decompose (SUBREG_REG (x));
2515 else
2516 /* A hard reg. */
2517 val.end = val.start + subreg_nregs (x);
2518 break;
2519
2520 case SCRATCH:
2521 /* This hasn't been assigned yet, so it can't conflict yet. */
2522 val.safe = 1;
2523 break;
2524
2525 default:
2526 gcc_assert (CONSTANT_P (x));
2527 val.safe = 1;
2528 break;
2529 }
2530 return val;
2531 }
2532
2533 /* Return 1 if altering Y will not modify the value of X.
2534 Y is also described by YDATA, which should be decompose (Y). */
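/* Illustrative example (added, not in the original source): two
   frame-pointer slots such as (mem:SI (plus:SI FP (const_int 8))) and
   (mem:SI (plus:SI FP (const_int 16))) decompose to the same base with
   ranges [8,12) and [16,20), so each is immune to stores into the other;
   if the bases differ and either one is variable, immune_p conservatively
   returns 0.  */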
2535
2536 static int
2537 immune_p (rtx x, rtx y, struct decomposition ydata)
2538 {
2539 struct decomposition xdata;
2540
2541 if (ydata.reg_flag)
2542 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2543 if (ydata.safe)
2544 return 1;
2545
2546 gcc_assert (MEM_P (y));
2547 /* If Y is memory and X is not, Y can't affect X. */
2548 if (!MEM_P (x))
2549 return 1;
2550
2551 xdata = decompose (x);
2552
2553 if (! rtx_equal_p (xdata.base, ydata.base))
2554 {
2555 /* If bases are distinct symbolic constants, there is no overlap. */
2556 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2557 return 1;
2558 /* Constants and stack slots never overlap. */
2559 if (CONSTANT_P (xdata.base)
2560 && (ydata.base == frame_pointer_rtx
2561 || ydata.base == hard_frame_pointer_rtx
2562 || ydata.base == stack_pointer_rtx))
2563 return 1;
2564 if (CONSTANT_P (ydata.base)
2565 && (xdata.base == frame_pointer_rtx
2566 || xdata.base == hard_frame_pointer_rtx
2567 || xdata.base == stack_pointer_rtx))
2568 return 1;
2569 /* If either base is variable, we don't know anything. */
2570 return 0;
2571 }
2572
2573 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2574 }
2575
2576 /* Similar, but calls decompose. */
2577
2578 int
2579 safe_from_earlyclobber (rtx op, rtx clobber)
2580 {
2581 struct decomposition early_data;
2582
2583 early_data = decompose (clobber);
2584 return immune_p (op, clobber, early_data);
2585 }
2586 \f
2587 /* Main entry point of this file: search the body of INSN
2588 for values that need reloading and record them with push_reload.
2589 REPLACE nonzero means record also where the values occur
2590 so that subst_reloads can be used.
2591
2592 IND_LEVELS says how many levels of indirection are supported by this
2593 machine; a value of zero means that a memory reference is not a valid
2594 memory address.
2595
2596 LIVE_KNOWN says we have valid information about which hard
2597 regs are live at each point in the program; this is true when
2598 we are called from global_alloc but false when stupid register
2599 allocation has been done.
2600
2601 RELOAD_REG_P, if nonzero, is a vector indexed by hard reg number;
2602 an element is nonnegative if that reg has been commandeered for reloading into.
2603 It is copied into STATIC_RELOAD_REG_P and referenced from there
2604 by various subroutines.
2605
2606 Return TRUE if some operands need to be changed, because of swapping
2607 commutative operands, reg_equiv_address substitution, or whatever. */
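/* Rough roadmap (added note, not in the original source): after a few
   early exits the function extracts the operands and constraints, records
   which operands must match, reloads operand addresses and substitutes
   constant equivalences, and then loops over the constraint alternatives
   (trying commutative operands both ways), scoring each one and finally
   generating reloads for the best alternative found.  */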
2608
2609 int
2610 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2611 short *reload_reg_p)
2612 {
2613 int insn_code_number;
2614 int i, j;
2615 int noperands;
2616 /* These start out as the constraints for the insn
2617 and they are chewed up as we consider alternatives. */
2618 const char *constraints[MAX_RECOG_OPERANDS];
2619 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2620 a register. */
2621 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2622 char pref_or_nothing[MAX_RECOG_OPERANDS];
2623 /* Nonzero for a MEM operand whose entire address needs a reload.
2624 May be -1 to indicate the entire address may or may not need a reload. */
2625 int address_reloaded[MAX_RECOG_OPERANDS];
2626 /* Nonzero for an address operand that needs to be completely reloaded.
2627 May be -1 to indicate the entire operand may or may not need a reload. */
2628 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2629 /* Value of enum reload_type to use for operand. */
2630 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2631 /* Value of enum reload_type to use within address of operand. */
2632 enum reload_type address_type[MAX_RECOG_OPERANDS];
2633 /* Save the usage of each operand. */
2634 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2635 int no_input_reloads = 0, no_output_reloads = 0;
2636 int n_alternatives;
2637 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2638 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2639 char this_alternative_win[MAX_RECOG_OPERANDS];
2640 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2641 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2642 int this_alternative_matches[MAX_RECOG_OPERANDS];
2643 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2644 int this_alternative_number;
2645 int goal_alternative_number = 0;
2646 int operand_reloadnum[MAX_RECOG_OPERANDS];
2647 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2648 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2649 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2650 char goal_alternative_win[MAX_RECOG_OPERANDS];
2651 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2652 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2653 int goal_alternative_swapped;
2654 int best;
2655 int commutative;
2656 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2657 rtx substed_operand[MAX_RECOG_OPERANDS];
2658 rtx body = PATTERN (insn);
2659 rtx set = single_set (insn);
2660 int goal_earlyclobber = 0, this_earlyclobber;
2661 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2662 int retval = 0;
2663
2664 this_insn = insn;
2665 n_reloads = 0;
2666 n_replacements = 0;
2667 n_earlyclobbers = 0;
2668 replace_reloads = replace;
2669 hard_regs_live_known = live_known;
2670 static_reload_reg_p = reload_reg_p;
2671
2672 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2673 neither are insns that SET cc0. Insns that use CC0 are not allowed
2674 to have any input reloads. */
2675 if (JUMP_P (insn) || CALL_P (insn))
2676 no_output_reloads = 1;
2677
2678 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2679 no_input_reloads = 1;
2680 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2681 no_output_reloads = 1;
2682
2683 #ifdef SECONDARY_MEMORY_NEEDED
2684 /* The eliminated forms of any secondary memory locations are per-insn, so
2685 clear them out here. */
2686
2687 if (secondary_memlocs_elim_used)
2688 {
2689 memset (secondary_memlocs_elim, 0,
2690 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2691 secondary_memlocs_elim_used = 0;
2692 }
2693 #endif
2694
2695 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2696 is cheap to move between them. If it is not, there may not be an insn
2697 to do the copy, so we may need a reload. */
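/* Added note: 2 is the baseline value register_move_cost returns for a
   move within a class on most targets, so a higher cost is taken here as
   a hint that the target may lack a direct move pattern and the copy
   could still need reloading.  */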
2698 if (GET_CODE (body) == SET
2699 && REG_P (SET_DEST (body))
2700 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2701 && REG_P (SET_SRC (body))
2702 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2703 && register_move_cost (GET_MODE (SET_SRC (body)),
2704 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2705 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2706 return 0;
2707
2708 extract_insn (insn);
2709
2710 noperands = reload_n_operands = recog_data.n_operands;
2711 n_alternatives = recog_data.n_alternatives;
2712
2713 /* Just return "no reloads" if insn has no operands with constraints. */
2714 if (noperands == 0 || n_alternatives == 0)
2715 return 0;
2716
2717 insn_code_number = INSN_CODE (insn);
2718 this_insn_is_asm = insn_code_number < 0;
2719
2720 memcpy (operand_mode, recog_data.operand_mode,
2721 noperands * sizeof (machine_mode));
2722 memcpy (constraints, recog_data.constraints,
2723 noperands * sizeof (const char *));
2724
2725 commutative = -1;
2726
2727 /* If we will need to know, later, whether some pair of operands
2728 are the same, we must compare them now and save the result.
2729 Reloading the base and index registers will clobber them
2730 and afterward they will fail to match. */
2731
2732 for (i = 0; i < noperands; i++)
2733 {
2734 const char *p;
2735 int c;
2736 char *end;
2737
2738 substed_operand[i] = recog_data.operand[i];
2739 p = constraints[i];
2740
2741 modified[i] = RELOAD_READ;
2742
2743 /* Scan this operand's constraint to see if it is an output operand,
2744 an in-out operand, is commutative, or should match another. */
2745
2746 while ((c = *p))
2747 {
2748 p += CONSTRAINT_LEN (c, p);
2749 switch (c)
2750 {
2751 case '=':
2752 modified[i] = RELOAD_WRITE;
2753 break;
2754 case '+':
2755 modified[i] = RELOAD_READ_WRITE;
2756 break;
2757 case '%':
2758 {
2759 /* The last operand should not be marked commutative. */
2760 gcc_assert (i != noperands - 1);
2761
2762 /* We currently only support one commutative pair of
2763 operands. Some existing asm code currently uses more
2764 than one pair. Previously, that would usually work,
2765 but sometimes it would crash the compiler. We
2766 continue supporting that case as well as we can by
2767 silently ignoring all but the first pair. In the
2768 future we may handle it correctly. */
2769 if (commutative < 0)
2770 commutative = i;
2771 else
2772 gcc_assert (this_insn_is_asm);
2773 }
2774 break;
2775 /* Use of ISDIGIT is tempting here, but it may get expensive because
2776 of locale support we don't want. */
2777 case '0': case '1': case '2': case '3': case '4':
2778 case '5': case '6': case '7': case '8': case '9':
2779 {
2780 c = strtoul (p - 1, &end, 10);
2781 p = end;
2782
2783 operands_match[c][i]
2784 = operands_match_p (recog_data.operand[c],
2785 recog_data.operand[i]);
2786
2787 /* An operand may not match itself. */
2788 gcc_assert (c != i);
2789
2790 /* If C can be commuted with C+1, and C might need to match I,
2791 then C+1 might also need to match I. */
2792 if (commutative >= 0)
2793 {
2794 if (c == commutative || c == commutative + 1)
2795 {
2796 int other = c + (c == commutative ? 1 : -1);
2797 operands_match[other][i]
2798 = operands_match_p (recog_data.operand[other],
2799 recog_data.operand[i]);
2800 }
2801 if (i == commutative || i == commutative + 1)
2802 {
2803 int other = i + (i == commutative ? 1 : -1);
2804 operands_match[c][other]
2805 = operands_match_p (recog_data.operand[c],
2806 recog_data.operand[other]);
2807 }
2808 /* Note that C is supposed to be less than I.
2809 No need to consider altering both C and I because in
2810 that case we would alter one into the other. */
2811 }
2812 }
2813 }
2814 }
2815 }
2816
2817 /* Examine each operand that is a memory reference or memory address
2818 and reload parts of the addresses into index registers.
2819 Also here any references to pseudo regs that didn't get hard regs
2820 but are equivalent to constants get replaced in the insn itself
2821 with those constants. Nobody will ever see them again.
2822
2823 Finally, set up the preferred classes of each operand. */
2824
2825 for (i = 0; i < noperands; i++)
2826 {
2827 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2828
2829 address_reloaded[i] = 0;
2830 address_operand_reloaded[i] = 0;
2831 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2832 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2833 : RELOAD_OTHER);
2834 address_type[i]
2835 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2836 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2837 : RELOAD_OTHER);
2838
2839 if (*constraints[i] == 0)
2840 /* Ignore things like match_operator operands. */
2841 ;
2842 else if (insn_extra_address_constraint
2843 (lookup_constraint (constraints[i])))
2844 {
2845 address_operand_reloaded[i]
2846 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2847 recog_data.operand[i],
2848 recog_data.operand_loc[i],
2849 i, operand_type[i], ind_levels, insn);
2850
2851 /* If we now have a simple operand where we used to have a
2852 PLUS or MULT, re-recognize and try again. */
2853 if ((OBJECT_P (*recog_data.operand_loc[i])
2854 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2855 && (GET_CODE (recog_data.operand[i]) == MULT
2856 || GET_CODE (recog_data.operand[i]) == PLUS))
2857 {
2858 INSN_CODE (insn) = -1;
2859 retval = find_reloads (insn, replace, ind_levels, live_known,
2860 reload_reg_p);
2861 return retval;
2862 }
2863
2864 recog_data.operand[i] = *recog_data.operand_loc[i];
2865 substed_operand[i] = recog_data.operand[i];
2866
2867 /* Address operands are reloaded in their existing mode,
2868 no matter what is specified in the machine description. */
2869 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2870
2871 /* If the address is a single CONST_INT, pick the address mode
2872 instead; otherwise we will later not know in which mode
2873 the reload should be performed. */
2874 if (operand_mode[i] == VOIDmode)
2875 operand_mode[i] = Pmode;
2876
2877 }
2878 else if (code == MEM)
2879 {
2880 address_reloaded[i]
2881 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2882 recog_data.operand_loc[i],
2883 XEXP (recog_data.operand[i], 0),
2884 &XEXP (recog_data.operand[i], 0),
2885 i, address_type[i], ind_levels, insn);
2886 recog_data.operand[i] = *recog_data.operand_loc[i];
2887 substed_operand[i] = recog_data.operand[i];
2888 }
2889 else if (code == SUBREG)
2890 {
2891 rtx reg = SUBREG_REG (recog_data.operand[i]);
2892 rtx op
2893 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2894 ind_levels,
2895 set != 0
2896 && &SET_DEST (set) == recog_data.operand_loc[i],
2897 insn,
2898 &address_reloaded[i]);
2899
2900 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2901 that didn't get a hard register, emit a USE with a REG_EQUAL
2902 note in front so that we might inherit a previous, possibly
2903 wider reload. */
2904
2905 if (replace
2906 && MEM_P (op)
2907 && REG_P (reg)
2908 && (GET_MODE_SIZE (GET_MODE (reg))
2909 >= GET_MODE_SIZE (GET_MODE (op)))
2910 && reg_equiv_constant (REGNO (reg)) == 0)
2911 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2912 insn),
2913 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2914
2915 substed_operand[i] = recog_data.operand[i] = op;
2916 }
2917 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2918 /* We can get a PLUS as an "operand" as a result of register
2919 elimination. See eliminate_regs and gen_reload. We handle
2920 a unary operator by reloading the operand. */
2921 substed_operand[i] = recog_data.operand[i]
2922 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2923 ind_levels, 0, insn,
2924 &address_reloaded[i]);
2925 else if (code == REG)
2926 {
2927 /* This is equivalent to calling find_reloads_toplev.
2928 The code is duplicated for speed.
2929 When we find a pseudo always equivalent to a constant,
2930 we replace it by the constant. We must be sure, however,
2931 that we don't try to replace it in the insn in which it
2932 is being set. */
2933 int regno = REGNO (recog_data.operand[i]);
2934 if (reg_equiv_constant (regno) != 0
2935 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2936 {
2937 /* Record the existing mode so that the check if constants are
2938 allowed will work when operand_mode isn't specified. */
2939
2940 if (operand_mode[i] == VOIDmode)
2941 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2942
2943 substed_operand[i] = recog_data.operand[i]
2944 = reg_equiv_constant (regno);
2945 }
2946 if (reg_equiv_memory_loc (regno) != 0
2947 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2948 /* We need not give a valid is_set_dest argument since the case
2949 of a constant equivalence was checked above. */
2950 substed_operand[i] = recog_data.operand[i]
2951 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2952 ind_levels, 0, insn,
2953 &address_reloaded[i]);
2954 }
2955 /* If the operand is still a register (we didn't replace it with an
2956 equivalent), get the preferred class to reload it into. */
2957 code = GET_CODE (recog_data.operand[i]);
2958 preferred_class[i]
2959 = ((code == REG && REGNO (recog_data.operand[i])
2960 >= FIRST_PSEUDO_REGISTER)
2961 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2962 : NO_REGS);
2963 pref_or_nothing[i]
2964 = (code == REG
2965 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2966 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2967 }
2968
2969 /* If this is simply a copy from operand 1 to operand 0, merge the
2970 preferred classes for the operands. */
2971 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2972 && recog_data.operand[1] == SET_SRC (set))
2973 {
2974 preferred_class[0] = preferred_class[1]
2975 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2976 pref_or_nothing[0] |= pref_or_nothing[1];
2977 pref_or_nothing[1] |= pref_or_nothing[0];
2978 }
2979
2980 /* Now see what we need for pseudo-regs that didn't get hard regs
2981 or got the wrong kind of hard reg. For this, we must consider
2982 all the operands together against the register constraints. */
2983
2984 best = MAX_RECOG_OPERANDS * 2 + 600;
2985
2986 goal_alternative_swapped = 0;
2987
2988 /* The constraints are made of several alternatives.
2989 Each operand's constraint looks like foo,bar,... with commas
2990 separating the alternatives. The first alternatives for all
2991 operands go together, the second alternatives go together, etc.
2992
2993 First loop over alternatives. */
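/* Illustrative example (added, not in the original source): for a
   two-operand insn with constraints "=r,m" and "rm,r", alternative 0
   pairs "=r" with "rm" and alternative 1 pairs "m" with "r"; the loop
   below scores each such column and remembers the cheapest one.  */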
2994
2995 alternative_mask enabled = get_enabled_alternatives (insn);
2996 for (this_alternative_number = 0;
2997 this_alternative_number < n_alternatives;
2998 this_alternative_number++)
2999 {
3000 int swapped;
3001
3002 if (!TEST_BIT (enabled, this_alternative_number))
3003 {
3004 int i;
3005
3006 for (i = 0; i < recog_data.n_operands; i++)
3007 constraints[i] = skip_alternative (constraints[i]);
3008
3009 continue;
3010 }
3011
3012 /* If insn is commutative (it's safe to exchange a certain pair
3013 of operands) then we need to try each alternative twice, the
3014 second time matching those two operands as if we had
3015 exchanged them. To do this, really exchange them in
3016 operands. */
3017 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3018 {
3019 /* Loop over operands for one constraint alternative. */
3020 /* LOSERS counts those that don't fit this alternative
3021 and would require loading. */
3022 int losers = 0;
3023 /* BAD is set to 1 if some operand can't fit this alternative
3024 even after reloading. */
3025 int bad = 0;
3026 /* REJECT is a count of how undesirable this alternative says it is
3027 if any reloading is required. If the alternative matches exactly
3028 then REJECT is ignored, but otherwise it gets this much
3029 counted against it in addition to the reloading needed. Each
3030 ? counts three times here since we want the disparaging caused by
3031 a bad register class to only count 1/3 as much. */
3032 int reject = 0;
3033
3034 if (swapped)
3035 {
3036 recog_data.operand[commutative] = substed_operand[commutative + 1];
3037 recog_data.operand[commutative + 1] = substed_operand[commutative];
3038 /* Swap the duplicates too. */
3039 for (i = 0; i < recog_data.n_dups; i++)
3040 if (recog_data.dup_num[i] == commutative
3041 || recog_data.dup_num[i] == commutative + 1)
3042 *recog_data.dup_loc[i]
3043 = recog_data.operand[(int) recog_data.dup_num[i]];
3044
3045 std::swap (preferred_class[commutative],
3046 preferred_class[commutative + 1]);
3047 std::swap (pref_or_nothing[commutative],
3048 pref_or_nothing[commutative + 1]);
3049 std::swap (address_reloaded[commutative],
3050 address_reloaded[commutative + 1]);
3051 }
3052
3053 this_earlyclobber = 0;
3054
3055 for (i = 0; i < noperands; i++)
3056 {
3057 const char *p = constraints[i];
3058 char *end;
3059 int len;
3060 int win = 0;
3061 int did_match = 0;
3062 /* 0 => this operand can be reloaded somehow for this alternative. */
3063 int badop = 1;
3064 /* 0 => this operand can be reloaded if the alternative allows regs. */
3065 int winreg = 0;
3066 int c;
3067 int m;
3068 rtx operand = recog_data.operand[i];
3069 int offset = 0;
3070 /* Nonzero means this is a MEM that must be reloaded into a reg
3071 regardless of what the constraint says. */
3072 int force_reload = 0;
3073 int offmemok = 0;
3074 /* Nonzero if a constant forced into memory would be OK for this
3075 operand. */
3076 int constmemok = 0;
3077 int earlyclobber = 0;
3078 enum constraint_num cn;
3079 enum reg_class cl;
3080
3081 /* If the predicate accepts a unary operator, it means that
3082 we need to reload the operand, but do not do this for
3083 match_operator and friends. */
3084 if (UNARY_P (operand) && *p != 0)
3085 operand = XEXP (operand, 0);
3086
3087 /* If the operand is a SUBREG, extract
3088 the REG or MEM (or maybe even a constant) within.
3089 (Constants can occur as a result of reg_equiv_constant.) */
3090
3091 while (GET_CODE (operand) == SUBREG)
3092 {
3093 /* Offset only matters when operand is a REG and
3094 it is a hard reg. This is because it is passed
3095 to reg_fits_class_p if it is a REG and all pseudos
3096 return 0 from that function. */
3097 if (REG_P (SUBREG_REG (operand))
3098 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3099 {
3100 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3101 GET_MODE (SUBREG_REG (operand)),
3102 SUBREG_BYTE (operand),
3103 GET_MODE (operand)) < 0)
3104 force_reload = 1;
3105 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3106 GET_MODE (SUBREG_REG (operand)),
3107 SUBREG_BYTE (operand),
3108 GET_MODE (operand));
3109 }
3110 operand = SUBREG_REG (operand);
3111 /* Force reload if this is a constant or PLUS or if there may
3112 be a problem accessing OPERAND in the outer mode. */
3113 if (CONSTANT_P (operand)
3114 || GET_CODE (operand) == PLUS
3115 /* We must force a reload of paradoxical SUBREGs
3116 of a MEM because the alignment of the inner value
3117 may not be enough to do the outer reference. On
3118 big-endian machines, it may also reference outside
3119 the object.
3120
3121 On machines that extend byte operations and we have a
3122 SUBREG where both the inner and outer modes are no wider
3123 than a word and the inner mode is narrower, is integral,
3124 and gets extended when loaded from memory, combine.c has
3125 made assumptions about the behavior of the machine in such
3126 register access. If the data is, in fact, in memory we
3127 must always load using the size assumed to be in the
3128 register and let the insn do the different-sized
3129 accesses.
3130
3131 This is doubly true if WORD_REGISTER_OPERATIONS. In
3132 this case eliminate_regs has left non-paradoxical
3133 subregs for push_reload to see. Make sure it does
3134 by forcing the reload.
3135
3136 ??? When is it right at this stage to have a subreg
3137 of a mem that is _not_ to be handled specially? IMO
3138 those should have been reduced to just a mem. */
3139 || ((MEM_P (operand)
3140 || (REG_P (operand)
3141 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3142 #if !WORD_REGISTER_OPERATIONS
3143 && (((GET_MODE_BITSIZE (GET_MODE (operand))
3144 < BIGGEST_ALIGNMENT)
3145 && (GET_MODE_SIZE (operand_mode[i])
3146 > GET_MODE_SIZE (GET_MODE (operand))))
3147 || BYTES_BIG_ENDIAN
3148 #ifdef LOAD_EXTEND_OP
3149 || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3150 && (GET_MODE_SIZE (GET_MODE (operand))
3151 <= UNITS_PER_WORD)
3152 && (GET_MODE_SIZE (operand_mode[i])
3153 > GET_MODE_SIZE (GET_MODE (operand)))
3154 && INTEGRAL_MODE_P (GET_MODE (operand))
3155 && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN)
3156 #endif
3157 )
3158 #endif
3159 )
3160 )
3161 force_reload = 1;
3162 }
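/* Editorial illustration, not part of the original source: a
   paradoxical SUBREG of a memory reference, e.g. (subreg:SI (mem:QI ...) 0),
   typically gets FORCE_RELOAD set above, since reading SImode through a
   QImode slot may be under-aligned or may touch bytes outside the object;
   the inner value must therefore be brought into a register first. */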
3163
3164 this_alternative[i] = NO_REGS;
3165 this_alternative_win[i] = 0;
3166 this_alternative_match_win[i] = 0;
3167 this_alternative_offmemok[i] = 0;
3168 this_alternative_earlyclobber[i] = 0;
3169 this_alternative_matches[i] = -1;
3170
3171 /* An empty constraint or empty alternative
3172 allows anything which matched the pattern. */
3173 if (*p == 0 || *p == ',')
3174 win = 1, badop = 0;
3175
3176 /* Scan this alternative's specs for this operand;
3177 set WIN if the operand fits any letter in this alternative.
3178 Otherwise, clear BADOP if this operand could
3179 fit some letter after reloads,
3180 or set WINREG if this operand could fit after reloads
3181 provided the constraint allows some registers. */
3182
3183 do
3184 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3185 {
3186 case '\0':
3187 len = 0;
3188 break;
3189 case ',':
3190 c = '\0';
3191 break;
3192
3193 case '?':
3194 reject += 6;
3195 break;
3196
3197 case '!':
3198 reject = 600;
3199 break;
3200
3201 case '#':
3202 /* Ignore rest of this alternative as far as
3203 reloading is concerned. */
3204 do
3205 p++;
3206 while (*p && *p != ',');
3207 len = 0;
3208 break;
3209
3210 case '0': case '1': case '2': case '3': case '4':
3211 case '5': case '6': case '7': case '8': case '9':
3212 m = strtoul (p, &end, 10);
3213 p = end;
3214 len = 0;
3215
3216 this_alternative_matches[i] = m;
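/* Editorial illustration, not part of the original source: in a
   two-address add (set (reg 100) (plus (reg 100) (reg 101))) whose
   operand constraints are "=r", "0" and "r", the digit constraint "0"
   on operand 1 records m = 0 here; the test below then consults
   operands_match[0][1], and if the two operands do not match, both
   are counted as losers. */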
3217 /* We are supposed to match a previous operand.
3218 If we do, we win if that one did.
3219 If we do not, count both of the operands as losers.
3220 (This is too conservative, since most of the time
3221 only a single reload insn will be needed to make
3222 the two operands win. As a result, this alternative
3223 may be rejected when it is actually desirable.) */
3224 if ((swapped && (m != commutative || i != commutative + 1))
3225 /* If we are matching as if two operands were swapped,
3226 also pretend that operands_match had been computed
3227 with swapped.
3228 But if I is the second of those and M is the first,
3229 don't exchange them, because operands_match is valid
3230 only on one side of its diagonal. */
3231 ? (operands_match
3232 [(m == commutative || m == commutative + 1)
3233 ? 2 * commutative + 1 - m : m]
3234 [(i == commutative || i == commutative + 1)
3235 ? 2 * commutative + 1 - i : i])
3236 : operands_match[m][i])
3237 {
3238 /* If we are matching a non-offsettable address where an
3239 offsettable address was expected, then we must reject
3240 this combination, because we can't reload it. */
3241 if (this_alternative_offmemok[m]
3242 && MEM_P (recog_data.operand[m])
3243 && this_alternative[m] == NO_REGS
3244 && ! this_alternative_win[m])
3245 bad = 1;
3246
3247 did_match = this_alternative_win[m];
3248 }
3249 else
3250 {
3251 /* Operands don't match. */
3252 rtx value;
3253 int loc1, loc2;
3254 /* Retroactively mark the operand we had to match
3255 as a loser, if it wasn't already. */
3256 if (this_alternative_win[m])
3257 losers++;
3258 this_alternative_win[m] = 0;
3259 if (this_alternative[m] == NO_REGS)
3260 bad = 1;
3261 /* But count the pair only once in the total badness of
3262 this alternative, if the pair can be a dummy reload.
3263 The pointers in operand_loc are not swapped; swap
3264 them by hand if necessary. */
3265 if (swapped && i == commutative)
3266 loc1 = commutative + 1;
3267 else if (swapped && i == commutative + 1)
3268 loc1 = commutative;
3269 else
3270 loc1 = i;
3271 if (swapped && m == commutative)
3272 loc2 = commutative + 1;
3273 else if (swapped && m == commutative + 1)
3274 loc2 = commutative;
3275 else
3276 loc2 = m;
3277 value
3278 = find_dummy_reload (recog_data.operand[i],
3279 recog_data.operand[m],
3280 recog_data.operand_loc[loc1],
3281 recog_data.operand_loc[loc2],
3282 operand_mode[i], operand_mode[m],
3283 this_alternative[m], -1,
3284 this_alternative_earlyclobber[m]);
3285
3286 if (value != 0)
3287 losers--;
3288 }
3289 /* This can be fixed with reloads if the operand
3290 we are supposed to match can be fixed with reloads. */
3291 badop = 0;
3292 this_alternative[i] = this_alternative[m];
3293
3294 /* If we have to reload this operand and some previous
3295 operand also had to match the same thing as this
3296 operand, we don't know how to do that. So reject this
3297 alternative. */
3298 if (! did_match || force_reload)
3299 for (j = 0; j < i; j++)
3300 if (this_alternative_matches[j]
3301 == this_alternative_matches[i])
3302 {
3303 badop = 1;
3304 break;
3305 }
3306 break;
3307
3308 case 'p':
3309 /* All necessary reloads for an address_operand
3310 were handled in find_reloads_address. */
3311 this_alternative[i]
3312 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3313 ADDRESS, SCRATCH);
3314 win = 1;
3315 badop = 0;
3316 break;
3317
3318 case TARGET_MEM_CONSTRAINT:
3319 if (force_reload)
3320 break;
3321 if (MEM_P (operand)
3322 || (REG_P (operand)
3323 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3324 && reg_renumber[REGNO (operand)] < 0))
3325 win = 1;
3326 if (CONST_POOL_OK_P (operand_mode[i], operand))
3327 badop = 0;
3328 constmemok = 1;
3329 break;
3330
3331 case '<':
3332 if (MEM_P (operand)
3333 && ! address_reloaded[i]
3334 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3335 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3336 win = 1;
3337 break;
3338
3339 case '>':
3340 if (MEM_P (operand)
3341 && ! address_reloaded[i]
3342 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3343 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3344 win = 1;
3345 break;
3346
3347 /* Memory operand whose address is not offsettable. */
3348 case 'V':
3349 if (force_reload)
3350 break;
3351 if (MEM_P (operand)
3352 && ! (ind_levels ? offsettable_memref_p (operand)
3353 : offsettable_nonstrict_memref_p (operand))
3354 /* Certain mem addresses will become offsettable
3355 after they themselves are reloaded. This is important;
3356 we don't want our own handling of unoffsettables
3357 to override the handling of reg_equiv_address. */
3358 && !(REG_P (XEXP (operand, 0))
3359 && (ind_levels == 0
3360 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3361 win = 1;
3362 break;
3363
3364 /* Memory operand whose address is offsettable. */
3365 case 'o':
3366 if (force_reload)
3367 break;
3368 if ((MEM_P (operand)
3369 /* If IND_LEVELS, find_reloads_address won't reload a
3370 pseudo that didn't get a hard reg, so we have to
3371 reject that case. */
3372 && ((ind_levels ? offsettable_memref_p (operand)
3373 : offsettable_nonstrict_memref_p (operand))
3374 /* A reloaded address is offsettable because it is now
3375 just a simple register indirect. */
3376 || address_reloaded[i] == 1))
3377 || (REG_P (operand)
3378 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3379 && reg_renumber[REGNO (operand)] < 0
3380 /* If reg_equiv_address is nonzero, we will be
3381 loading it into a register; hence it will be
3382 offsettable, but we cannot say that reg_equiv_mem
3383 is offsettable without checking. */
3384 && ((reg_equiv_mem (REGNO (operand)) != 0
3385 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3386 || (reg_equiv_address (REGNO (operand)) != 0))))
3387 win = 1;
3388 if (CONST_POOL_OK_P (operand_mode[i], operand)
3389 || MEM_P (operand))
3390 badop = 0;
3391 constmemok = 1;
3392 offmemok = 1;
3393 break;
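/* Editorial illustration, not part of the original source: an address
   such as (mem:SI (plus (reg fp) (const_int 8))) is offsettable and
   satisfies 'o' directly, whereas (mem:SI (post_inc (reg))) is not and
   instead matches 'V' above; the latter can still satisfy 'o' once its
   address has been reloaded into a simple base register. */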
3394
3395 case '&':
3396 /* Output operand that is stored before the need for the
3397 input operands (and their index registers) is over. */
3398 earlyclobber = 1, this_earlyclobber = 1;
3399 break;
3400
3401 case 'X':
3402 force_reload = 0;
3403 win = 1;
3404 break;
3405
3406 case 'g':
3407 if (! force_reload
3408 /* A PLUS is never a valid operand, but reload can make
3409 it from a register when eliminating registers. */
3410 && GET_CODE (operand) != PLUS
3411 /* A SCRATCH is not a valid operand. */
3412 && GET_CODE (operand) != SCRATCH
3413 && (! CONSTANT_P (operand)
3414 || ! flag_pic
3415 || LEGITIMATE_PIC_OPERAND_P (operand))
3416 && (GENERAL_REGS == ALL_REGS
3417 || !REG_P (operand)
3418 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3419 && reg_renumber[REGNO (operand)] < 0)))
3420 win = 1;
3421 cl = GENERAL_REGS;
3422 goto reg;
3423
3424 default:
3425 cn = lookup_constraint (p);
3426 switch (get_constraint_type (cn))
3427 {
3428 case CT_REGISTER:
3429 cl = reg_class_for_constraint (cn);
3430 if (cl != NO_REGS)
3431 goto reg;
3432 break;
3433
3434 case CT_CONST_INT:
3435 if (CONST_INT_P (operand)
3436 && (insn_const_int_ok_for_constraint
3437 (INTVAL (operand), cn)))
3438 win = true;
3439 break;
3440
3441 case CT_MEMORY:
3442 if (force_reload)
3443 break;
3444 if (constraint_satisfied_p (operand, cn))
3445 win = 1;
3446 /* If the address was already reloaded,
3447 we win as well. */
3448 else if (MEM_P (operand) && address_reloaded[i] == 1)
3449 win = 1;
3450 /* Likewise if the address will be reloaded because
3451 reg_equiv_address is nonzero. For reg_equiv_mem
3452 we have to check. */
3453 else if (REG_P (operand)
3454 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3455 && reg_renumber[REGNO (operand)] < 0
3456 && ((reg_equiv_mem (REGNO (operand)) != 0
3457 && (constraint_satisfied_p
3458 (reg_equiv_mem (REGNO (operand)),
3459 cn)))
3460 || (reg_equiv_address (REGNO (operand))
3461 != 0)))
3462 win = 1;
3463
3464 /* If we didn't already win, we can reload
3465 constants via force_const_mem, and other
3466 MEMs by reloading the address like for 'o'. */
3467 if (CONST_POOL_OK_P (operand_mode[i], operand)
3468 || MEM_P (operand))
3469 badop = 0;
3470 constmemok = 1;
3471 offmemok = 1;
3472 break;
3473
3474 case CT_ADDRESS:
3475 if (constraint_satisfied_p (operand, cn))
3476 win = 1;
3477
3478 /* If we didn't already win, we can reload
3479 the address into a base register. */
3480 this_alternative[i]
3481 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3482 ADDRESS, SCRATCH);
3483 badop = 0;
3484 break;
3485
3486 case CT_FIXED_FORM:
3487 if (constraint_satisfied_p (operand, cn))
3488 win = 1;
3489 break;
3490 }
3491 break;
3492
3493 reg:
3494 this_alternative[i]
3495 = reg_class_subunion[this_alternative[i]][cl];
3496 if (GET_MODE (operand) == BLKmode)
3497 break;
3498 winreg = 1;
3499 if (REG_P (operand)
3500 && reg_fits_class_p (operand, this_alternative[i],
3501 offset, GET_MODE (recog_data.operand[i])))
3502 win = 1;
3503 break;
3504 }
3505 while ((p += len), c);
3506
3507 if (swapped == (commutative >= 0 ? 1 : 0))
3508 constraints[i] = p;
3509
3510 /* If this operand could be handled with a reg,
3511 and some reg is allowed, then this operand can be handled. */
3512 if (winreg && this_alternative[i] != NO_REGS
3513 && (win || !class_only_fixed_regs[this_alternative[i]]))
3514 badop = 0;
3515
3516 /* Record which operands fit this alternative. */
3517 this_alternative_earlyclobber[i] = earlyclobber;
3518 if (win && ! force_reload)
3519 this_alternative_win[i] = 1;
3520 else if (did_match && ! force_reload)
3521 this_alternative_match_win[i] = 1;
3522 else
3523 {
3524 int const_to_mem = 0;
3525
3526 this_alternative_offmemok[i] = offmemok;
3527 losers++;
3528 if (badop)
3529 bad = 1;
3530 /* Alternative loses if it has no regs for a reg operand. */
3531 if (REG_P (operand)
3532 && this_alternative[i] == NO_REGS
3533 && this_alternative_matches[i] < 0)
3534 bad = 1;
3535
3536 /* If this is a constant that is reloaded into the desired
3537 class by copying it to memory first, count that as another
3538 reload. This is consistent with other code and is
3539 required to avoid choosing another alternative when
3540 the constant is moved into memory by this function on
3541 an early reload pass. Note that the test here is
3542 precisely the same as in the code below that calls
3543 force_const_mem. */
3544 if (CONST_POOL_OK_P (operand_mode[i], operand)
3545 && ((targetm.preferred_reload_class (operand,
3546 this_alternative[i])
3547 == NO_REGS)
3548 || no_input_reloads))
3549 {
3550 const_to_mem = 1;
3551 if (this_alternative[i] != NO_REGS)
3552 losers++;
3553 }
3554
3555 /* Alternative loses if it requires a type of reload not
3556 permitted for this insn. We can always reload SCRATCH
3557 and objects with a REG_UNUSED note. */
3558 if (GET_CODE (operand) != SCRATCH
3559 && modified[i] != RELOAD_READ && no_output_reloads
3560 && ! find_reg_note (insn, REG_UNUSED, operand))
3561 bad = 1;
3562 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3563 && ! const_to_mem)
3564 bad = 1;
3565
3566 /* If we can't reload this value at all, reject this
3567 alternative. Note that we could also lose due to
3568 LIMIT_RELOAD_CLASS, but we don't check that
3569 here. */
3570
3571 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3572 {
3573 if (targetm.preferred_reload_class (operand,
3574 this_alternative[i])
3575 == NO_REGS)
3576 reject = 600;
3577
3578 if (operand_type[i] == RELOAD_FOR_OUTPUT
3579 && (targetm.preferred_output_reload_class (operand,
3580 this_alternative[i])
3581 == NO_REGS))
3582 reject = 600;
3583 }
3584
3585 /* We prefer to reload pseudos over reloading other things,
3586 since such reloads may be able to be eliminated later.
3587 If we are reloading a SCRATCH, we won't be generating any
3588 insns, just using a register, so it is also preferred.
3589 So bump REJECT in other cases. Don't do this in the
3590 case where we are forcing a constant into memory and
3591 it will then win since we don't want to have a different
3592 alternative match then. */
3593 if (! (REG_P (operand)
3594 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3595 && GET_CODE (operand) != SCRATCH
3596 && ! (const_to_mem && constmemok))
3597 reject += 2;
3598
3599 /* Input reloads can be inherited more often than output
3600 reloads can be removed, so penalize output reloads. */
3601 if (operand_type[i] != RELOAD_FOR_INPUT
3602 && GET_CODE (operand) != SCRATCH)
3603 reject++;
3604 }
3605
3606 /* If this operand is a pseudo register that didn't get
3607 a hard reg and this alternative accepts some
3608 register, see if the class that we want is a subset
3609 of the preferred class for this register. If not,
3610 but it intersects that class, use the preferred class
3611 instead. If it does not intersect the preferred
3612 class, show that usage of this alternative should be
3613 discouraged; it will be discouraged more still if the
3614 register is `preferred or nothing'. We do this
3615 because it increases the chance of reusing our spill
3616 register in a later insn and avoiding a pair of
3617 memory stores and loads.
3618
3619 Don't bother with this if this alternative will
3620 accept this operand.
3621
3622 Don't do this for a multiword operand, since it is
3623 only a small win and has the risk of requiring more
3624 spill registers, which could cause a large loss.
3625
3626 Don't do this if the preferred class has only one
3627 register because we might otherwise exhaust the
3628 class. */
3629
3630 if (! win && ! did_match
3631 && this_alternative[i] != NO_REGS
3632 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3633 && reg_class_size [(int) preferred_class[i]] > 0
3634 && ! small_register_class_p (preferred_class[i]))
3635 {
3636 if (! reg_class_subset_p (this_alternative[i],
3637 preferred_class[i]))
3638 {
3639 /* Since we don't have a way of forming the intersection,
3640 we just do something special if the preferred class
3641 is a subset of the class we have; that's the most
3642 common case anyway. */
3643 if (reg_class_subset_p (preferred_class[i],
3644 this_alternative[i]))
3645 this_alternative[i] = preferred_class[i];
3646 else
3647 reject += (2 + 2 * pref_or_nothing[i]);
3648 }
3649 }
3650 }
3651
3652 /* Now see if any output operands that are marked "earlyclobber"
3653 in this alternative conflict with any input operands
3654 or any memory addresses. */
3655
3656 for (i = 0; i < noperands; i++)
3657 if (this_alternative_earlyclobber[i]
3658 && (this_alternative_win[i] || this_alternative_match_win[i]))
3659 {
3660 struct decomposition early_data;
3661
3662 early_data = decompose (recog_data.operand[i]);
3663
3664 gcc_assert (modified[i] != RELOAD_READ);
3665
3666 if (this_alternative[i] == NO_REGS)
3667 {
3668 this_alternative_earlyclobber[i] = 0;
3669 gcc_assert (this_insn_is_asm);
3670 error_for_asm (this_insn,
3671 "%<&%> constraint used with no register class");
3672 }
3673
3674 for (j = 0; j < noperands; j++)
3675 /* Is this an input operand or a memory ref? */
3676 if ((MEM_P (recog_data.operand[j])
3677 || modified[j] != RELOAD_WRITE)
3678 && j != i
3679 /* Ignore things like match_operator operands. */
3680 && !recog_data.is_operator[j]
3681 /* Don't count an input operand that is constrained to match
3682 the early clobber operand. */
3683 && ! (this_alternative_matches[j] == i
3684 && rtx_equal_p (recog_data.operand[i],
3685 recog_data.operand[j]))
3686 /* Is it altered by storing the earlyclobber operand? */
3687 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3688 early_data))
3689 {
3690 /* If the output is in a non-empty few-regs class,
3691 it's costly to reload it, so reload the input instead. */
3692 if (small_register_class_p (this_alternative[i])
3693 && (REG_P (recog_data.operand[j])
3694 || GET_CODE (recog_data.operand[j]) == SUBREG))
3695 {
3696 losers++;
3697 this_alternative_win[j] = 0;
3698 this_alternative_match_win[j] = 0;
3699 }
3700 else
3701 break;
3702 }
3703 /* If an earlyclobber operand conflicts with something,
3704 it must be reloaded, so request this and count the cost. */
3705 if (j != noperands)
3706 {
3707 losers++;
3708 this_alternative_win[i] = 0;
3709 this_alternative_match_win[j] = 0;
3710 for (j = 0; j < noperands; j++)
3711 if (this_alternative_matches[j] == i
3712 && this_alternative_match_win[j])
3713 {
3714 this_alternative_win[j] = 0;
3715 this_alternative_match_win[j] = 0;
3716 losers++;
3717 }
3718 }
3719 }
3720
3721 /* If one alternative accepts all the operands, no reload required,
3722 choose that alternative; don't consider the remaining ones. */
3723 if (losers == 0)
3724 {
3725 /* Unswap these so that they are never swapped at `finish'. */
3726 if (swapped)
3727 {
3728 recog_data.operand[commutative] = substed_operand[commutative];
3729 recog_data.operand[commutative + 1]
3730 = substed_operand[commutative + 1];
3731 }
3732 for (i = 0; i < noperands; i++)
3733 {
3734 goal_alternative_win[i] = this_alternative_win[i];
3735 goal_alternative_match_win[i] = this_alternative_match_win[i];
3736 goal_alternative[i] = this_alternative[i];
3737 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3738 goal_alternative_matches[i] = this_alternative_matches[i];
3739 goal_alternative_earlyclobber[i]
3740 = this_alternative_earlyclobber[i];
3741 }
3742 goal_alternative_number = this_alternative_number;
3743 goal_alternative_swapped = swapped;
3744 goal_earlyclobber = this_earlyclobber;
3745 goto finish;
3746 }
3747
3748 /* REJECT, set by the ! and ? constraint characters and when a register
3749 would be reloaded into a non-preferred class, discourages the use of
3750 this alternative for a reload goal. REJECT is incremented by six
3751 for each ? and two for each non-preferred class. */
3752 losers = losers * 6 + reject;
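/* Editorial note, not part of the original source; a worked example of
   the scoring above: an alternative needing one reload and carrying a
   single '?' scores 1 * 6 + 6 = 12, the same as an alternative needing
   two reloads and no '?'. A '!' sets REJECT to 600, which all but rules
   an alternative out unless it matches exactly (LOSERS == 0), in which
   case it is chosen before the score is ever computed. */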
3753
3754 /* If this alternative can be made to work by reloading,
3755 and it needs less reloading than the others checked so far,
3756 record it as the chosen goal for reloading. */
3757 if (! bad)
3758 {
3759 if (best > losers)
3760 {
3761 for (i = 0; i < noperands; i++)
3762 {
3763 goal_alternative[i] = this_alternative[i];
3764 goal_alternative_win[i] = this_alternative_win[i];
3765 goal_alternative_match_win[i]
3766 = this_alternative_match_win[i];
3767 goal_alternative_offmemok[i]
3768 = this_alternative_offmemok[i];
3769 goal_alternative_matches[i] = this_alternative_matches[i];
3770 goal_alternative_earlyclobber[i]
3771 = this_alternative_earlyclobber[i];
3772 }
3773 goal_alternative_swapped = swapped;
3774 best = losers;
3775 goal_alternative_number = this_alternative_number;
3776 goal_earlyclobber = this_earlyclobber;
3777 }
3778 }
3779
3780 if (swapped)
3781 {
3782 /* If the commutative operands have been swapped, swap
3783 them back in order to check the next alternative. */
3784 recog_data.operand[commutative] = substed_operand[commutative];
3785 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3786 /* Unswap the duplicates too. */
3787 for (i = 0; i < recog_data.n_dups; i++)
3788 if (recog_data.dup_num[i] == commutative
3789 || recog_data.dup_num[i] == commutative + 1)
3790 *recog_data.dup_loc[i]
3791 = recog_data.operand[(int) recog_data.dup_num[i]];
3792
3793 /* Unswap the operand related information as well. */
3794 std::swap (preferred_class[commutative],
3795 preferred_class[commutative + 1]);
3796 std::swap (pref_or_nothing[commutative],
3797 pref_or_nothing[commutative + 1]);
3798 std::swap (address_reloaded[commutative],
3799 address_reloaded[commutative + 1]);
3800 }
3801 }
3802 }
3803
3804 /* The operands don't meet the constraints.
3805 goal_alternative describes the alternative
3806 that we could reach by reloading the fewest operands.
3807 Reload so as to fit it. */
3808
3809 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3810 {
3811 /* No alternative works with reloads?? */
3812 if (insn_code_number >= 0)
3813 fatal_insn ("unable to generate reloads for:", insn);
3814 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3815 /* Avoid further trouble with this insn. */
3816 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3817 n_reloads = 0;
3818 return 0;
3819 }
3820
3821 /* Jump to `finish' from above if all operands are valid already.
3822 In that case, goal_alternative_win is all 1. */
3823 finish:
3824
3825 /* Right now, for any pair of operands I and J that are required to match,
3826 with I < J,
3827 goal_alternative_matches[J] is I.
3828 Set up goal_alternative_matched as the inverse function:
3829 goal_alternative_matched[I] = J. */
3830
3831 for (i = 0; i < noperands; i++)
3832 goal_alternative_matched[i] = -1;
3833
3834 for (i = 0; i < noperands; i++)
3835 if (! goal_alternative_win[i]
3836 && goal_alternative_matches[i] >= 0)
3837 goal_alternative_matched[goal_alternative_matches[i]] = i;
3838
3839 for (i = 0; i < noperands; i++)
3840 goal_alternative_win[i] |= goal_alternative_match_win[i];
3841
3842 /* If the best alternative is with operands 1 and 2 swapped,
3843 consider them swapped before reporting the reloads. Update the
3844 operand numbers of any reloads already pushed. */
3845
3846 if (goal_alternative_swapped)
3847 {
3848 std::swap (substed_operand[commutative],
3849 substed_operand[commutative + 1]);
3850 std::swap (recog_data.operand[commutative],
3851 recog_data.operand[commutative + 1]);
3852 std::swap (*recog_data.operand_loc[commutative],
3853 *recog_data.operand_loc[commutative + 1]);
3854
3855 for (i = 0; i < recog_data.n_dups; i++)
3856 if (recog_data.dup_num[i] == commutative
3857 || recog_data.dup_num[i] == commutative + 1)
3858 *recog_data.dup_loc[i]
3859 = recog_data.operand[(int) recog_data.dup_num[i]];
3860
3861 for (i = 0; i < n_reloads; i++)
3862 {
3863 if (rld[i].opnum == commutative)
3864 rld[i].opnum = commutative + 1;
3865 else if (rld[i].opnum == commutative + 1)
3866 rld[i].opnum = commutative;
3867 }
3868 }
3869
3870 for (i = 0; i < noperands; i++)
3871 {
3872 operand_reloadnum[i] = -1;
3873
3874 /* If this is an earlyclobber operand, we need to widen the scope.
3875 The reload must remain valid from the start of the insn being
3876 reloaded until after the operand is stored into its destination.
3877 We approximate this with RELOAD_OTHER even though we know that we
3878 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3879
3880 One special case that is worth checking is when we have an
3881 output that is earlyclobber but isn't used past the insn (typically
3882 a SCRATCH). In this case, we only need to have the reload live
3883 through the insn itself, but not for any of our input or output
3884 reloads.
3885 But we must not accidentally narrow the scope of an existing
3886 RELOAD_OTHER reload - leave these alone.
3887
3888 In any case, any reloads needed to address this operand can remain
3889 however they were previously categorized. */
3890
3891 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3892 operand_type[i]
3893 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3894 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3895 }
3896
3897 /* Any constants that aren't allowed and can't be reloaded
3898 into registers are here changed into memory references. */
3899 for (i = 0; i < noperands; i++)
3900 if (! goal_alternative_win[i])
3901 {
3902 rtx op = recog_data.operand[i];
3903 rtx subreg = NULL_RTX;
3904 rtx plus = NULL_RTX;
3905 machine_mode mode = operand_mode[i];
3906
3907 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3908 push_reload so we have to let them pass here. */
3909 if (GET_CODE (op) == SUBREG)
3910 {
3911 subreg = op;
3912 op = SUBREG_REG (op);
3913 mode = GET_MODE (op);
3914 }
3915
3916 if (GET_CODE (op) == PLUS)
3917 {
3918 plus = op;
3919 op = XEXP (op, 1);
3920 }
3921
3922 if (CONST_POOL_OK_P (mode, op)
3923 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3924 == NO_REGS)
3925 || no_input_reloads))
3926 {
3927 int this_address_reloaded;
3928 rtx tem = force_const_mem (mode, op);
3929
3930 /* If we stripped a SUBREG or a PLUS above, add it back. */
3931 if (plus != NULL_RTX)
3932 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3933
3934 if (subreg != NULL_RTX)
3935 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3936
3937 this_address_reloaded = 0;
3938 substed_operand[i] = recog_data.operand[i]
3939 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3940 0, insn, &this_address_reloaded);
3941
3942 /* If the alternative accepts constant pool refs directly
3943 there will be no reload needed at all. */
3944 if (plus == NULL_RTX
3945 && subreg == NULL_RTX
3946 && alternative_allows_const_pool_ref (this_address_reloaded == 0
3947 ? substed_operand[i]
3948 : NULL,
3949 recog_data.constraints[i],
3950 goal_alternative_number))
3951 goal_alternative_win[i] = 1;
3952 }
3953 }
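/* Editorial illustration, not part of the original source: if the
   chosen alternative only offers a register class that cannot be
   loaded directly from the constant (preferred_reload_class returned
   NO_REGS), e.g. a floating-point immediate headed for a
   floating-point register on many targets, the loop above replaces the
   constant with a constant-pool MEM and reloads that MEM's address if
   necessary. */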
3954
3955 /* Record the values of the earlyclobber operands for the caller. */
3956 if (goal_earlyclobber)
3957 for (i = 0; i < noperands; i++)
3958 if (goal_alternative_earlyclobber[i])
3959 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3960
3961 /* Now record reloads for all the operands that need them. */
3962 for (i = 0; i < noperands; i++)
3963 if (! goal_alternative_win[i])
3964 {
3965 /* Operands that match previous ones have already been handled. */
3966 if (goal_alternative_matches[i] >= 0)
3967 ;
3968 /* Handle an operand with a nonoffsettable address
3969 appearing where an offsettable address will do
3970 by reloading the address into a base register.
3971
3972 ??? We can also do this when the operand is a register and
3973 reg_equiv_mem is not offsettable, but this is a bit tricky,
3974 so we don't bother with it. It may not be worth doing. */
3975 else if (goal_alternative_matched[i] == -1
3976 && goal_alternative_offmemok[i]
3977 && MEM_P (recog_data.operand[i]))
3978 {
3979 /* If the address to be reloaded is a VOIDmode constant,
3980 use the default address mode as the mode of the reload register,
3981 as would have been done by find_reloads_address. */
3982 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3983 machine_mode address_mode;
3984
3985 address_mode = get_address_mode (recog_data.operand[i]);
3986 operand_reloadnum[i]
3987 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3988 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3989 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3990 address_mode,
3991 VOIDmode, 0, 0, i, RELOAD_FOR_INPUT);
3992 rld[operand_reloadnum[i]].inc
3993 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3994
3995 /* If this operand is an output, we will have made any
3996 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3997 now we are treating part of the operand as an input, so
3998 we must change these to RELOAD_FOR_INPUT_ADDRESS. */
3999
4000 if (modified[i] == RELOAD_WRITE)
4001 {
4002 for (j = 0; j < n_reloads; j++)
4003 {
4004 if (rld[j].opnum == i)
4005 {
4006 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4007 rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS;
4008 else if (rld[j].when_needed
4009 == RELOAD_FOR_OUTADDR_ADDRESS)
4010 rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS;
4011 }
4012 }
4013 }
4014 }
4015 else if (goal_alternative_matched[i] == -1)
4016 {
4017 operand_reloadnum[i]
4018 = push_reload ((modified[i] != RELOAD_WRITE
4019 ? recog_data.operand[i] : 0),
4020 (modified[i] != RELOAD_READ
4021 ? recog_data.operand[i] : 0),
4022 (modified[i] != RELOAD_WRITE
4023 ? recog_data.operand_loc[i] : 0),
4024 (modified[i] != RELOAD_READ
4025 ? recog_data.operand_loc[i] : 0),
4026 (enum reg_class) goal_alternative[i],
4027 (modified[i] == RELOAD_WRITE
4028 ? VOIDmode : operand_mode[i]),
4029 (modified[i] == RELOAD_READ
4030 ? VOIDmode : operand_mode[i]),
4031 (insn_code_number < 0 ? 0
4032 : insn_data[insn_code_number].operand[i].strict_low),
4033 0, i, operand_type[i]);
4034 }
4035 /* In a matching pair of operands, one must be input only
4036 and the other must be output only.
4037 Pass the input operand as IN and the other as OUT. */
4038 else if (modified[i] == RELOAD_READ
4039 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4040 {
4041 operand_reloadnum[i]
4042 = push_reload (recog_data.operand[i],
4043 recog_data.operand[goal_alternative_matched[i]],
4044 recog_data.operand_loc[i],
4045 recog_data.operand_loc[goal_alternative_matched[i]],
4046 (enum reg_class) goal_alternative[i],
4047 operand_mode[i],
4048 operand_mode[goal_alternative_matched[i]],
4049 0, 0, i, RELOAD_OTHER);
4050 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4051 }
4052 else if (modified[i] == RELOAD_WRITE
4053 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4054 {
4055 operand_reloadnum[goal_alternative_matched[i]]
4056 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4057 recog_data.operand[i],
4058 recog_data.operand_loc[goal_alternative_matched[i]],
4059 recog_data.operand_loc[i],
4060 (enum reg_class) goal_alternative[i],
4061 operand_mode[goal_alternative_matched[i]],
4062 operand_mode[i],
4063 0, 0, i, RELOAD_OTHER);
4064 operand_reloadnum[i] = output_reloadnum;
4065 }
4066 else
4067 {
4068 gcc_assert (insn_code_number < 0);
4069 error_for_asm (insn, "inconsistent operand constraints "
4070 "in an %<asm%>");
4071 /* Avoid further trouble with this insn. */
4072 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4073 n_reloads = 0;
4074 return 0;
4075 }
4076 }
4077 else if (goal_alternative_matched[i] < 0
4078 && goal_alternative_matches[i] < 0
4079 && address_operand_reloaded[i] != 1
4080 && optimize)
4081 {
4082 /* For each non-matching operand that's a MEM or a pseudo-register
4083 that didn't get a hard register, make an optional reload.
4084 This may get done even if the insn needs no reloads otherwise. */
4085
4086 rtx operand = recog_data.operand[i];
4087
4088 while (GET_CODE (operand) == SUBREG)
4089 operand = SUBREG_REG (operand);
4090 if ((MEM_P (operand)
4091 || (REG_P (operand)
4092 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4093 /* If this is only for an output, the optional reload would not
4094 actually cause us to use a register now, just note that
4095 something is stored here. */
4096 && (goal_alternative[i] != NO_REGS
4097 || modified[i] == RELOAD_WRITE)
4098 && ! no_input_reloads
4099 /* An optional output reload might allow INSN to be deleted later.
4100 We mustn't make in-out reloads on insns that are not permitted
4101 to have output reloads.
4102 If this is an asm, we can't delete it; we must not even call
4103 push_reload for an optional output reload in this case,
4104 because we can't be sure that the constraint allows a register,
4105 and push_reload verifies the constraints for asms. */
4106 && (modified[i] == RELOAD_READ
4107 || (! no_output_reloads && ! this_insn_is_asm)))
4108 operand_reloadnum[i]
4109 = push_reload ((modified[i] != RELOAD_WRITE
4110 ? recog_data.operand[i] : 0),
4111 (modified[i] != RELOAD_READ
4112 ? recog_data.operand[i] : 0),
4113 (modified[i] != RELOAD_WRITE
4114 ? recog_data.operand_loc[i] : 0),
4115 (modified[i] != RELOAD_READ
4116 ? recog_data.operand_loc[i] : 0),
4117 (enum reg_class) goal_alternative[i],
4118 (modified[i] == RELOAD_WRITE
4119 ? VOIDmode : operand_mode[i]),
4120 (modified[i] == RELOAD_READ
4121 ? VOIDmode : operand_mode[i]),
4122 (insn_code_number < 0 ? 0
4123 : insn_data[insn_code_number].operand[i].strict_low),
4124 1, i, operand_type[i]);
4125 /* If a memory reference remains (either as a MEM or a pseudo that
4126 did not get a hard register), yet we can't make an optional
4127 reload, check if this is actually a pseudo register reference;
4128 we then need to emit a USE and/or a CLOBBER so that reload
4129 inheritance will do the right thing. */
4130 else if (replace
4131 && (MEM_P (operand)
4132 || (REG_P (operand)
4133 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4134 && reg_renumber [REGNO (operand)] < 0)))
4135 {
4136 operand = *recog_data.operand_loc[i];
4137
4138 while (GET_CODE (operand) == SUBREG)
4139 operand = SUBREG_REG (operand);
4140 if (REG_P (operand))
4141 {
4142 if (modified[i] != RELOAD_WRITE)
4143 /* We mark the USE with QImode so that we recognize
4144 it as one that can be safely deleted at the end
4145 of reload. */
4146 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4147 insn), QImode);
4148 if (modified[i] != RELOAD_READ)
4149 emit_insn_after (gen_clobber (operand), insn);
4150 }
4151 }
4152 }
4153 else if (goal_alternative_matches[i] >= 0
4154 && goal_alternative_win[goal_alternative_matches[i]]
4155 && modified[i] == RELOAD_READ
4156 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4157 && ! no_input_reloads && ! no_output_reloads
4158 && optimize)
4159 {
4160 /* Similarly, make an optional reload for a pair of matching
4161 objects that are in MEM or a pseudo that didn't get a hard reg. */
4162
4163 rtx operand = recog_data.operand[i];
4164
4165 while (GET_CODE (operand) == SUBREG)
4166 operand = SUBREG_REG (operand);
4167 if ((MEM_P (operand)
4168 || (REG_P (operand)
4169 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4170 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4171 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4172 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4173 recog_data.operand[i],
4174 recog_data.operand_loc[goal_alternative_matches[i]],
4175 recog_data.operand_loc[i],
4176 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4177 operand_mode[goal_alternative_matches[i]],
4178 operand_mode[i],
4179 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4180 }
4181
4182 /* Perform whatever substitutions on the operands we are supposed
4183 to make due to commutativity or replacement of registers
4184 with equivalent constants or memory slots. */
4185
4186 for (i = 0; i < noperands; i++)
4187 {
4188 /* We only do this on the last pass through reload, because it is
4189 possible for some data (like reg_equiv_address) to be changed during
4190 later passes. Moreover, we lose the opportunity to get a useful
4191 reload_{in,out}_reg when we do these replacements. */
4192
4193 if (replace)
4194 {
4195 rtx substitution = substed_operand[i];
4196
4197 *recog_data.operand_loc[i] = substitution;
4198
4199 /* If we're replacing an operand with a LABEL_REF, we need to
4200 make sure that there's a REG_LABEL_OPERAND note attached to
4201 this instruction. */
4202 if (GET_CODE (substitution) == LABEL_REF
4203 && !find_reg_note (insn, REG_LABEL_OPERAND,
4204 LABEL_REF_LABEL (substitution))
4205 /* For a JUMP_P, if it was a branch target it must have
4206 already been recorded as such. */
4207 && (!JUMP_P (insn)
4208 || !label_is_jump_target_p (LABEL_REF_LABEL (substitution),
4209 insn)))
4210 {
4211 add_reg_note (insn, REG_LABEL_OPERAND,
4212 LABEL_REF_LABEL (substitution));
4213 if (LABEL_P (LABEL_REF_LABEL (substitution)))
4214 ++LABEL_NUSES (LABEL_REF_LABEL (substitution));
4215 }
4216
4217 }
4218 else
4219 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4220 }
4221
4222 /* If this insn pattern contains any MATCH_DUP's, make sure that
4223 they will be substituted if the operands they match are substituted.
4224 Also do now any substitutions we already did on the operands.
4225
4226 Don't do this if we aren't making replacements because we might be
4227 propagating things allocated by frame pointer elimination into places
4228 it doesn't expect. */
4229
4230 if (insn_code_number >= 0 && replace)
4231 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4232 {
4233 int opno = recog_data.dup_num[i];
4234 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4235 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4236 }
4237
4238 #if 0
4239 /* This loses because reloading of prior insns can invalidate the equivalence
4240 (or at least find_equiv_reg isn't smart enough to find it any more),
4241 causing this insn to need more reload regs than it needed before.
4242 It may be too late to make the reload regs available.
4243 Now this optimization is done safely in choose_reload_regs. */
4244
4245 /* For each reload of a reg into some other class of reg,
4246 search for an existing equivalent reg (same value now) in the right class.
4247 We can use it as long as we don't need to change its contents. */
4248 for (i = 0; i < n_reloads; i++)
4249 if (rld[i].reg_rtx == 0
4250 && rld[i].in != 0
4251 && REG_P (rld[i].in)
4252 && rld[i].out == 0)
4253 {
4254 rld[i].reg_rtx
4255 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4256 static_reload_reg_p, 0, rld[i].inmode);
4257 /* Prevent generation of insn to load the value
4258 because the one we found already has the value. */
4259 if (rld[i].reg_rtx)
4260 rld[i].in = rld[i].reg_rtx;
4261 }
4262 #endif
4263
4264 /* If we detected an error and replaced the asm instruction with a USE,
4265 forget about the reloads. */
4266 if (GET_CODE (PATTERN (insn)) == USE
4267 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4268 n_reloads = 0;
4269
4270 /* Perhaps an output reload can be combined with another
4271 to reduce needs by one. */
4272 if (!goal_earlyclobber)
4273 combine_reloads ();
4274
4275 /* If we have a pair of reloads for parts of an address, they are reloading
4276 the same object, the operands themselves were not reloaded, and they
4277 are for two operands that are supposed to match, merge the reloads and
4278 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4279
4280 for (i = 0; i < n_reloads; i++)
4281 {
4282 int k;
4283
4284 for (j = i + 1; j < n_reloads; j++)
4285 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4286 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4287 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4288 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4289 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4290 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4291 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4292 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4293 && rtx_equal_p (rld[i].in, rld[j].in)
4294 && (operand_reloadnum[rld[i].opnum] < 0
4295 || rld[operand_reloadnum[rld[i].opnum]].optional)
4296 && (operand_reloadnum[rld[j].opnum] < 0
4297 || rld[operand_reloadnum[rld[j].opnum]].optional)
4298 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4299 || (goal_alternative_matches[rld[j].opnum]
4300 == rld[i].opnum)))
4301 {
4302 for (k = 0; k < n_replacements; k++)
4303 if (replacements[k].what == j)
4304 replacements[k].what = i;
4305
4306 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4307 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4308 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4309 else
4310 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4311 rld[j].in = 0;
4312 }
4313 }
4314
4315 /* Scan all the reloads and update their type.
4316 If a reload is for the address of an operand and we didn't reload
4317 that operand, change the type. Similarly, change the operand number
4318 of a reload when two operands match. If a reload is optional, treat it
4319 as though the operand isn't reloaded.
4320
4321 ??? This latter case is somewhat odd because if we do the optional
4322 reload, it means the object is hanging around. Thus we need only
4323 do the address reload if the optional reload was NOT done.
4324
4325 Change secondary reloads to be the address type of their operand, not
4326 the normal type.
4327
4328 If an operand's reload is now RELOAD_OTHER, change any
4329 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4330 RELOAD_FOR_OTHER_ADDRESS. */
4331
4332 for (i = 0; i < n_reloads; i++)
4333 {
4334 if (rld[i].secondary_p
4335 && rld[i].when_needed == operand_type[rld[i].opnum])
4336 rld[i].when_needed = address_type[rld[i].opnum];
4337
4338 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4339 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4340 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4341 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4342 && (operand_reloadnum[rld[i].opnum] < 0
4343 || rld[operand_reloadnum[rld[i].opnum]].optional))
4344 {
4345 /* If we have a secondary reload to go along with this reload,
4346 change its type to RELOAD_FOR_OPADDR_ADDR. */
4347
4348 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4349 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4350 && rld[i].secondary_in_reload != -1)
4351 {
4352 int secondary_in_reload = rld[i].secondary_in_reload;
4353
4354 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4355
4356 /* If there's a tertiary reload we have to change it also. */
4357 if (secondary_in_reload > 0
4358 && rld[secondary_in_reload].secondary_in_reload != -1)
4359 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4360 = RELOAD_FOR_OPADDR_ADDR;
4361 }
4362
4363 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4364 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4365 && rld[i].secondary_out_reload != -1)
4366 {
4367 int secondary_out_reload = rld[i].secondary_out_reload;
4368
4369 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4370
4371 /* If there's a tertiary reload we have to change it also. */
4372 if (secondary_out_reload
4373 && rld[secondary_out_reload].secondary_out_reload != -1)
4374 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4375 = RELOAD_FOR_OPADDR_ADDR;
4376 }
4377
4378 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4379 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4380 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4381 else
4382 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4383 }
4384
4385 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4386 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4387 && operand_reloadnum[rld[i].opnum] >= 0
4388 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4389 == RELOAD_OTHER))
4390 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4391
4392 if (goal_alternative_matches[rld[i].opnum] >= 0)
4393 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4394 }
4395
4396 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4397 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4398 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4399
4400 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4401 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4402 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4403 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4404 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4405 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4406 This is complicated by the fact that a single operand can have more
4407 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4408 choose_reload_regs without affecting code quality, and cases that
4409 actually fail are extremely rare, so it turns out to be better to fix
4410 the problem here by not generating cases that choose_reload_regs will
4411 fail for. */
4412 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4413 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4414 a single operand.
4415 We can reduce the register pressure by exploiting that a
4416 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4417 does not conflict with any of them, if it is only used for the first of
4418 the RELOAD_FOR_X_ADDRESS reloads. */
4419 {
4420 int first_op_addr_num = -2;
4421 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4422 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4423 int need_change = 0;
4424 /* We use first_op_addr_num and the contents of the above arrays
4425 first as flags: -2 means no instance encountered, -1 means exactly
4426 one instance encountered.
4427 If more than one instance has been encountered, we store the reload
4428 number of the first reload of the kind in question; reload numbers
4429 are known to be non-negative. */
4430 for (i = 0; i < noperands; i++)
4431 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4432 for (i = n_reloads - 1; i >= 0; i--)
4433 {
4434 switch (rld[i].when_needed)
4435 {
4436 case RELOAD_FOR_OPERAND_ADDRESS:
4437 if (++first_op_addr_num >= 0)
4438 {
4439 first_op_addr_num = i;
4440 need_change = 1;
4441 }
4442 break;
4443 case RELOAD_FOR_INPUT_ADDRESS:
4444 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4445 {
4446 first_inpaddr_num[rld[i].opnum] = i;
4447 need_change = 1;
4448 }
4449 break;
4450 case RELOAD_FOR_OUTPUT_ADDRESS:
4451 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4452 {
4453 first_outpaddr_num[rld[i].opnum] = i;
4454 need_change = 1;
4455 }
4456 break;
4457 default:
4458 break;
4459 }
4460 }
4461
4462 if (need_change)
4463 {
4464 for (i = 0; i < n_reloads; i++)
4465 {
4466 int first_num;
4467 enum reload_type type;
4468
4469 switch (rld[i].when_needed)
4470 {
4471 case RELOAD_FOR_OPADDR_ADDR:
4472 first_num = first_op_addr_num;
4473 type = RELOAD_FOR_OPERAND_ADDRESS;
4474 break;
4475 case RELOAD_FOR_INPADDR_ADDRESS:
4476 first_num = first_inpaddr_num[rld[i].opnum];
4477 type = RELOAD_FOR_INPUT_ADDRESS;
4478 break;
4479 case RELOAD_FOR_OUTADDR_ADDRESS:
4480 first_num = first_outpaddr_num[rld[i].opnum];
4481 type = RELOAD_FOR_OUTPUT_ADDRESS;
4482 break;
4483 default:
4484 continue;
4485 }
4486 if (first_num < 0)
4487 continue;
4488 else if (i > first_num)
4489 rld[i].when_needed = type;
4490 else
4491 {
4492 /* Check if the only TYPE reload that uses reload I is
4493 reload FIRST_NUM. */
4494 for (j = n_reloads - 1; j > first_num; j--)
4495 {
4496 if (rld[j].when_needed == type
4497 && (rld[i].secondary_p
4498 ? rld[j].secondary_in_reload == i
4499 : reg_mentioned_p (rld[i].in, rld[j].in)))
4500 {
4501 rld[i].when_needed = type;
4502 break;
4503 }
4504 }
4505 }
4506 }
4507 }
4508 }
4509
4510 /* See if we have any reloads that are now allowed to be merged
4511 because we've changed when the reload is needed to
4512 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4513 check for the most common cases. */
4514
4515 for (i = 0; i < n_reloads; i++)
4516 if (rld[i].in != 0 && rld[i].out == 0
4517 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4518 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4519 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4520 for (j = 0; j < n_reloads; j++)
4521 if (i != j && rld[j].in != 0 && rld[j].out == 0
4522 && rld[j].when_needed == rld[i].when_needed
4523 && MATCHES (rld[i].in, rld[j].in)
4524 && rld[i].rclass == rld[j].rclass
4525 && !rld[i].nocombine && !rld[j].nocombine
4526 && rld[i].reg_rtx == rld[j].reg_rtx)
4527 {
4528 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4529 transfer_replacements (i, j);
4530 rld[j].in = 0;
4531 }
4532
4533 /* If we made any reloads for addresses, see if they violate a
4534 "no input reloads" requirement for this insn. But loads that we
4535 do after the insn (such as for output addresses) are fine. */
4536 if (HAVE_cc0 && no_input_reloads)
4537 for (i = 0; i < n_reloads; i++)
4538 gcc_assert (rld[i].in == 0
4539 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4540 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4541
4542 /* Compute reload_mode and reload_nregs. */
4543 for (i = 0; i < n_reloads; i++)
4544 {
4545 rld[i].mode
4546 = (rld[i].inmode == VOIDmode
4547 || (GET_MODE_SIZE (rld[i].outmode)
4548 > GET_MODE_SIZE (rld[i].inmode)))
4549 ? rld[i].outmode : rld[i].inmode;
4550
4551 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4552 }
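/* Editorial note, not part of the original source; an example of the
   computation above: a reload whose inmode is SImode and whose outmode
   is DImode gets mode DImode (the wider of the two), and on a target
   with 32-bit registers its NREGS is 2. */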
4553
4554 /* Special case a simple move with an input reload and a
4555 destination of a hard reg: if the hard reg is OK, use it. */
4556 for (i = 0; i < n_reloads; i++)
4557 if (rld[i].when_needed == RELOAD_FOR_INPUT
4558 && GET_CODE (PATTERN (insn)) == SET
4559 && REG_P (SET_DEST (PATTERN (insn)))
4560 && (SET_SRC (PATTERN (insn)) == rld[i].in
4561 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4562 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4563 {
4564 rtx dest = SET_DEST (PATTERN (insn));
4565 unsigned int regno = REGNO (dest);
4566
4567 if (regno < FIRST_PSEUDO_REGISTER
4568 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4569 && HARD_REGNO_MODE_OK (regno, rld[i].mode))
4570 {
4571 int nr = hard_regno_nregs[regno][rld[i].mode];
4572 int ok = 1, nri;
4573
4574 for (nri = 1; nri < nr; nri ++)
4575 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4576 {
4577 ok = 0;
4578 break;
4579 }
4580
4581 if (ok)
4582 rld[i].reg_rtx = dest;
4583 }
4584 }
4585
4586 return retval;
4587 }
4588
4589 /* Return true if alternative number ALTNUM in constraint-string
4590 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4591 MEM gives the reference if it didn't need any reloads, otherwise it
4592 is null. */
4593
4594 static bool
4595 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4596 const char *constraint, int altnum)
4597 {
4598 int c;
4599
4600 /* Skip alternatives before the one requested. */
4601 while (altnum > 0)
4602 {
4603 while (*constraint++ != ',')
4604 ;
4605 altnum--;
4606 }
4607 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4608 If one of them is present, this alternative accepts the result of
4609 passing a constant-pool reference through find_reloads_toplev.
4610
4611 The same is true of extra memory constraints if the address
4612 was reloaded into a register. However, the target may elect
4613 to disallow the original constant address, forcing it to be
4614 reloaded into a register instead. */
4615 for (; (c = *constraint) && c != ',' && c != '#';
4616 constraint += CONSTRAINT_LEN (c, constraint))
4617 {
4618 enum constraint_num cn = lookup_constraint (constraint);
4619 if (insn_extra_memory_constraint (cn)
4620 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4621 return true;
4622 }
4623 return false;
4624 }
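/* Editorial usage sketch, not part of the original source: given the
   constraint string "r,o" and ALTNUM 1, the function above skips the
   first alternative, finds the memory constraint 'o' in the second,
   and returns true provided MEM is null (the address was reloaded) or
   itself satisfies the constraint. */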
4625 \f
4626 /* Scan X for memory references and scan the addresses for reloading.
4627 Also checks for references to "constant" regs that we want to eliminate
4628 and replaces them with the values they stand for.
4629 We may alter X destructively if it contains a reference to such.
4630 If X is just a constant reg, we return the equivalent value
4631 instead of X.
4632
4633 IND_LEVELS says how many levels of indirect addressing this machine
4634 supports.
4635
4636 OPNUM and TYPE identify the purpose of the reload.
4637
4638 IS_SET_DEST is true if X is the destination of a SET, which is not
4639 appropriate to be replaced by a constant.
4640
4641 INSN, if nonzero, is the insn in which we do the reload. It is used
4642 to determine if we may generate output reloads, and where to put USEs
4643 for pseudos that we have to replace with stack slots.
4644
4645 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4646 result of find_reloads_address. */
4647
4648 static rtx
4649 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4650 int ind_levels, int is_set_dest, rtx_insn *insn,
4651 int *address_reloaded)
4652 {
4653 RTX_CODE code = GET_CODE (x);
4654
4655 const char *fmt = GET_RTX_FORMAT (code);
4656 int i;
4657 int copied;
4658
4659 if (code == REG)
4660 {
4661 /* This code is duplicated for speed in find_reloads. */
4662 int regno = REGNO (x);
4663 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4664 x = reg_equiv_constant (regno);
4665 #if 0
4666 /* This creates (subreg (mem...)) which would cause an unnecessary
4667 reload of the mem. */
4668 else if (reg_equiv_mem (regno) != 0)
4669 x = reg_equiv_mem (regno);
4670 #endif
4671 else if (reg_equiv_memory_loc (regno)
4672 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4673 {
4674 rtx mem = make_memloc (x, regno);
4675 if (reg_equiv_address (regno)
4676 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4677 {
4678 /* If this is not a toplevel operand, find_reloads doesn't see
4679 this substitution. We have to emit a USE of the pseudo so
4680 that delete_output_reload can see it. */
4681 if (replace_reloads && recog_data.operand[opnum] != x)
4682 /* We mark the USE with QImode so that we recognize it
4683 as one that can be safely deleted at the end of
4684 reload. */
4685 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4686 QImode);
4687 x = mem;
4688 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4689 opnum, type, ind_levels, insn);
4690 if (!rtx_equal_p (x, mem))
4691 push_reg_equiv_alt_mem (regno, x);
4692 if (address_reloaded)
4693 *address_reloaded = i;
4694 }
4695 }
4696 return x;
4697 }
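/* Editorial illustration, not part of the original source: a pseudo
   whose reg_equiv_constant is (const_int 42) is returned above as that
   constant (unless X is the destination of a SET), while a pseudo that
   was given a stack slot is rewritten as the equivalent MEM via
   make_memloc, with its address reloaded if necessary. */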
4698 if (code == MEM)
4699 {
4700 rtx tem = x;
4701
4702 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4703 opnum, type, ind_levels, insn);
4704 if (address_reloaded)
4705 *address_reloaded = i;
4706
4707 return tem;
4708 }
4709
4710 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4711 {
4712 /* Check for SUBREG containing a REG that's equivalent to a
4713 constant. If the constant has a known value, truncate it
4714 right now. Similarly if we are extracting a single-word of a
4715 multi-word constant. If the constant is symbolic, allow it
4716 to be substituted normally. push_reload will strip the
4717 subreg later. The constant must not be VOIDmode, because we
4718 will lose the mode of the register (this should never happen
4719 because one of the cases above should handle it). */
4720
4721 int regno = REGNO (SUBREG_REG (x));
4722 rtx tem;
4723
4724 if (regno >= FIRST_PSEUDO_REGISTER
4725 && reg_renumber[regno] < 0
4726 && reg_equiv_constant (regno) != 0)
4727 {
4728 tem =
4729 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4730 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4731 gcc_assert (tem);
4732 if (CONSTANT_P (tem)
4733 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4734 {
4735 tem = force_const_mem (GET_MODE (x), tem);
4736 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4737 &XEXP (tem, 0), opnum, type,
4738 ind_levels, insn);
4739 if (address_reloaded)
4740 *address_reloaded = i;
4741 }
4742 return tem;
4743 }
4744
4745 /* If the subreg contains a reg that will be converted to a mem,
4746 attempt to convert the whole subreg to a (narrower or wider)
4747 memory reference instead. If this succeeds, we're done --
4748 otherwise fall through to check whether the inner reg still
4749 needs address reloads anyway. */
4750
4751 if (regno >= FIRST_PSEUDO_REGISTER
4752 && reg_equiv_memory_loc (regno) != 0)
4753 {
4754 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4755 insn, address_reloaded);
4756 if (tem)
4757 return tem;
4758 }
4759 }
4760
4761 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4762 {
4763 if (fmt[i] == 'e')
4764 {
4765 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4766 ind_levels, is_set_dest, insn,
4767 address_reloaded);
4768 /* If we have replaced a reg with its equivalent memory loc -
4769 that can still be handled here e.g. if it's in a paradoxical
4770 subreg - we must make the change in a copy, rather than using
4771 a destructive change. This way, find_reloads can still elect
4772 not to do the change. */
4773 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4774 {
4775 x = shallow_copy_rtx (x);
4776 copied = 1;
4777 }
4778 XEXP (x, i) = new_part;
4779 }
4780 }
4781 return x;
4782 }
4783
4784 /* Return a mem ref for the memory equivalent of reg REGNO.
4785 This mem ref is not shared with anything. */
4786
4787 static rtx
4788 make_memloc (rtx ad, int regno)
4789 {
4790 /* We must rerun eliminate_regs, in case the elimination
4791 offsets have changed. */
4792 rtx tem
4793 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4794 0);
4795
4796 /* If TEM might contain a pseudo, we must copy it to avoid
4797 modifying it when we do the substitution for the reload. */
4798 if (rtx_varies_p (tem, 0))
4799 tem = copy_rtx (tem);
4800
4801 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4802 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4803
4804 /* Copy the result if it's still the same as the equivalence, to avoid
4805 modifying it when we do the substitution for the reload. */
4806 if (tem == reg_equiv_memory_loc (regno))
4807 tem = copy_rtx (tem);
4808 return tem;
4809 }
4810
4811 /* Returns true if AD could be turned into a valid memory reference
4812 to mode MODE in address space AS by reloading the part pointed to
4813 by PART into a register. */
4814
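/* A hedged usage sketch (not part of the original sources): for an
   out-of-range frame address a caller can ask whether reloading just the
   base part would make the whole address valid, roughly

       rtx ad = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (100000));
       int ok = maybe_memory_address_addr_space_p (SImode, ad,
                                                   ADDR_SPACE_GENERIC,
                                                   &XEXP (ad, 0));

   The test works by temporarily substituting a fresh, never-allocated
   register for *PART, checking the result with
   memory_address_addr_space_p, and then restoring *PART.  */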
4815 static int
4816 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4817 addr_space_t as, rtx *part)
4818 {
4819 int retv;
4820 rtx tem = *part;
4821 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4822
4823 *part = reg;
4824 retv = memory_address_addr_space_p (mode, ad, as);
4825 *part = tem;
4826
4827 return retv;
4828 }
4829
4830 /* Record all reloads needed for handling memory address AD
4831 which appears in *LOC in a memory reference to mode MODE
4832 which itself is found in location *MEMREFLOC.
4833 Note that we take shortcuts assuming that no multi-reg machine mode
4834 occurs as part of an address.
4835
4836 OPNUM and TYPE specify the purpose of this reload.
4837
4838 IND_LEVELS says how many levels of indirect addressing this machine
4839 supports.
4840
4841 INSN, if nonzero, is the insn in which we do the reload. It is used
4842 to determine if we may generate output reloads, and where to put USEs
4843 for pseudos that we have to replace with stack slots.
4844
4845 Value is one if this address is reloaded or replaced as a whole; it is
4846 zero if the top level of this address was not reloaded or replaced, and
4847 it is -1 if it may or may not have been reloaded or replaced.
4848
4849 Note that there is no verification that the address will be valid after
4850 this routine does its work. Instead, we rely on the fact that the address
4851 was valid when reload started. So we need only undo things that reload
4852 could have broken. These are wrong register types, pseudos not allocated
4853 to a hard register, and frame pointer elimination. */
4854
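/* Illustrative sketch of the return convention above (offsets invented):
   if frame pointer elimination turned an operand address into

       (plus:SI (reg:SI sp) (const_int 100000))

   and that displacement is out of range for the target, the whole sum is
   reloaded into a base register and the value returned is 1 (assuming no
   outer AND had to be stripped).  If instead only an inner part needs
   fixing up -- say the index of an indexed stack slot -- the top level is
   left in place and the value returned is 0.  */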
4855 static int
4856 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4857 rtx *loc, int opnum, enum reload_type type,
4858 int ind_levels, rtx_insn *insn)
4859 {
4860 addr_space_t as = memrefloc ? MEM_ADDR_SPACE (*memrefloc)
4861 : ADDR_SPACE_GENERIC;
4862 int regno;
4863 int removed_and = 0;
4864 int op_index;
4865 rtx tem;
4866
4867 /* If the address is a register, see if it is a legitimate address and
4868 reload if not. We first handle the cases where we need not reload
4869 or where we must reload in a non-standard way. */
4870
4871 if (REG_P (ad))
4872 {
4873 regno = REGNO (ad);
4874
4875 if (reg_equiv_constant (regno) != 0)
4876 {
4877 find_reloads_address_part (reg_equiv_constant (regno), loc,
4878 base_reg_class (mode, as, MEM, SCRATCH),
4879 GET_MODE (ad), opnum, type, ind_levels);
4880 return 1;
4881 }
4882
4883 tem = reg_equiv_memory_loc (regno);
4884 if (tem != 0)
4885 {
4886 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4887 {
4888 tem = make_memloc (ad, regno);
4889 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4890 XEXP (tem, 0),
4891 MEM_ADDR_SPACE (tem)))
4892 {
4893 rtx orig = tem;
4894
4895 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4896 &XEXP (tem, 0), opnum,
4897 ADDR_TYPE (type), ind_levels, insn);
4898 if (!rtx_equal_p (tem, orig))
4899 push_reg_equiv_alt_mem (regno, tem);
4900 }
4901 /* We can avoid a reload if the register's equivalent memory
4902 expression is valid as an indirect memory address.
4903 But not all addresses are valid in a mem used as an indirect
4904 address: only reg or reg+constant. */
4905
4906 if (ind_levels > 0
4907 && strict_memory_address_addr_space_p (mode, tem, as)
4908 && (REG_P (XEXP (tem, 0))
4909 || (GET_CODE (XEXP (tem, 0)) == PLUS
4910 && REG_P (XEXP (XEXP (tem, 0), 0))
4911 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4912 {
4913 /* TEM is not the same as what we'll be replacing the
4914 pseudo with after reload, put a USE in front of INSN
4915 in the final reload pass. */
4916 if (replace_reloads
4917 && num_not_at_initial_offset
4918 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4919 {
4920 *loc = tem;
4921 /* We mark the USE with QImode so that we
4922 recognize it as one that can be safely
4923 deleted at the end of reload. */
4924 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4925 insn), QImode);
4926
4927 /* This doesn't really count as replacing the address
4928 as a whole, since it is still a memory access. */
4929 }
4930 return 0;
4931 }
4932 ad = tem;
4933 }
4934 }
4935
4936 /* The only remaining case where we can avoid a reload is if this is a
4937 hard register that is valid as a base register and which is not the
4938 subject of a CLOBBER in this insn. */
4939
4940 else if (regno < FIRST_PSEUDO_REGISTER
4941 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4942 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4943 return 0;
4944
4945 /* If we do not have one of the cases above, we must do the reload. */
4946 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4947 base_reg_class (mode, as, MEM, SCRATCH),
4948 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4949 return 1;
4950 }
4951
4952 if (strict_memory_address_addr_space_p (mode, ad, as))
4953 {
4954 /* The address appears valid, so reloads are not needed.
4955 But the address may contain an eliminable register.
4956 This can happen because a machine with indirect addressing
4957 may consider a pseudo register by itself a valid address even when
4958 it has failed to get a hard reg.
4959 So do a tree-walk to find and eliminate all such regs. */
4960
4961 /* But first quickly dispose of a common case. */
4962 if (GET_CODE (ad) == PLUS
4963 && CONST_INT_P (XEXP (ad, 1))
4964 && REG_P (XEXP (ad, 0))
4965 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4966 return 0;
4967
4968 subst_reg_equivs_changed = 0;
4969 *loc = subst_reg_equivs (ad, insn);
4970
4971 if (! subst_reg_equivs_changed)
4972 return 0;
4973
4974 /* Check result for validity after substitution. */
4975 if (strict_memory_address_addr_space_p (mode, ad, as))
4976 return 0;
4977 }
4978
4979 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4980 do
4981 {
4982 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4983 {
4984 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4985 ind_levels, win);
4986 }
4987 break;
4988 win:
4989 *memrefloc = copy_rtx (*memrefloc);
4990 XEXP (*memrefloc, 0) = ad;
4991 move_replacements (&ad, &XEXP (*memrefloc, 0));
4992 return -1;
4993 }
4994 while (0);
4995 #endif
4996
4997 /* The address is not valid. We have to figure out why. First see if
4998 we have an outer AND and remove it if so. Then analyze what's inside. */
4999
5000 if (GET_CODE (ad) == AND)
5001 {
5002 removed_and = 1;
5003 loc = &XEXP (ad, 0);
5004 ad = *loc;
5005 }
5006
5007 /* One possibility for why the address is invalid is that it is itself
5008 a MEM. This can happen when the frame pointer is being eliminated, a
5009 pseudo is not allocated to a hard register, and the offset between the
5010 frame and stack pointers is not its initial value. In that case the
5011 pseudo will have been replaced by a MEM referring to the
5012 stack pointer. */
5013 if (MEM_P (ad))
5014 {
5015 /* First ensure that the address in this MEM is valid. Then, unless
5016 indirect addresses are valid, reload the MEM into a register. */
5017 tem = ad;
5018 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5019 opnum, ADDR_TYPE (type),
5020 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5021
5022 /* If tem was changed, then we must create a new memory reference to
5023 hold it and store it back into memrefloc. */
5024 if (tem != ad && memrefloc)
5025 {
5026 *memrefloc = copy_rtx (*memrefloc);
5027 copy_replacements (tem, XEXP (*memrefloc, 0));
5028 loc = &XEXP (*memrefloc, 0);
5029 if (removed_and)
5030 loc = &XEXP (*loc, 0);
5031 }
5032
5033 /* Check cases similar to the indirect-address cases above, except
5034 that we can allow pseudos and a MEM since they should have been
5035 taken care of above. */
5036
5037 if (ind_levels == 0
5038 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5039 || MEM_P (XEXP (tem, 0))
5040 || ! (REG_P (XEXP (tem, 0))
5041 || (GET_CODE (XEXP (tem, 0)) == PLUS
5042 && REG_P (XEXP (XEXP (tem, 0), 0))
5043 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5044 {
5045 /* Must use TEM here, not AD, since it is the one that will
5046 have any subexpressions reloaded, if needed. */
5047 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5048 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5049 VOIDmode, 0,
5050 0, opnum, type);
5051 return ! removed_and;
5052 }
5053 else
5054 return 0;
5055 }
5056
5057 /* If we have address of a stack slot but it's not valid because the
5058 displacement is too large, compute the sum in a register.
5059 Handle all base registers here, not just fp/ap/sp, because on some
5060 targets (namely SH) we can also get too large displacements from
5061 big-endian corrections. */
5062 else if (GET_CODE (ad) == PLUS
5063 && REG_P (XEXP (ad, 0))
5064 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5065 && CONST_INT_P (XEXP (ad, 1))
5066 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5067 CONST_INT)
5068 /* Similarly, if we were to reload the base register and the
5069 mem+offset address is still invalid, then we want to reload
5070 the whole address, not just the base register. */
5071 || ! maybe_memory_address_addr_space_p
5072 (mode, ad, as, &(XEXP (ad, 0)))))
5073
5074 {
5075 /* Unshare the MEM rtx so we can safely alter it. */
5076 if (memrefloc)
5077 {
5078 *memrefloc = copy_rtx (*memrefloc);
5079 loc = &XEXP (*memrefloc, 0);
5080 if (removed_and)
5081 loc = &XEXP (*loc, 0);
5082 }
5083
5084 if (double_reg_address_ok
5085 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5086 PLUS, CONST_INT))
5087 {
5088 /* Unshare the sum as well. */
5089 *loc = ad = copy_rtx (ad);
5090
5091 /* Reload the displacement into an index reg.
5092 We assume the frame pointer or arg pointer is a base reg. */
5093 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5094 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5095 type, ind_levels);
5096 return 0;
5097 }
5098 else
5099 {
5100 /* If the sum of two regs is not necessarily valid,
5101 reload the sum into a base reg.
5102 That will at least work. */
5103 find_reloads_address_part (ad, loc,
5104 base_reg_class (mode, as, MEM, SCRATCH),
5105 GET_MODE (ad), opnum, type, ind_levels);
5106 }
5107 return ! removed_and;
5108 }
5109
5110 /* If we have an indexed stack slot, there are three possible reasons why
5111 it might be invalid: The index might need to be reloaded, the address
5112 might have been made by frame pointer elimination and hence have a
5113 constant out of range, or both reasons might apply.
5114
5115 We can easily check for an index needing reload, but even if that is the
5116 case, we might also have an invalid constant. To avoid making the
5117 conservative assumption and requiring two reloads, we see if this address
5118 is valid when not interpreted strictly. If it is, the only problem is
5119 that the index needs a reload and find_reloads_address_1 will take care
5120 of it.
5121
5122 Handle all base registers here, not just fp/ap/sp, because on some
5123 targets (namely SPARC) we can also get invalid addresses from preventive
5124 subreg big-endian corrections made by find_reloads_toplev. We
5125 can also get expressions involving LO_SUM (rather than PLUS) from
5126 find_reloads_subreg_address.
5127
5128 If we decide to do something, it must be that `double_reg_address_ok'
5129 is true. We generate a reload of the base register + constant and
5130 rework the sum so that the reload register will be added to the index.
5131 This is safe because we know the address isn't shared.
5132
5133 We check for the base register as both the first and second operand of
5134 the innermost PLUS and/or LO_SUM. */
5135
5136 for (op_index = 0; op_index < 2; ++op_index)
5137 {
5138 rtx operand, addend;
5139 enum rtx_code inner_code;
5140
5141 if (GET_CODE (ad) != PLUS)
5142 continue;
5143
5144 inner_code = GET_CODE (XEXP (ad, 0));
5145 if (!(GET_CODE (ad) == PLUS
5146 && CONST_INT_P (XEXP (ad, 1))
5147 && (inner_code == PLUS || inner_code == LO_SUM)))
5148 continue;
5149
5150 operand = XEXP (XEXP (ad, 0), op_index);
5151 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5152 continue;
5153
5154 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5155
5156 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5157 GET_CODE (addend))
5158 || operand == frame_pointer_rtx
5159 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5160 && operand == hard_frame_pointer_rtx)
5161 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5162 && operand == arg_pointer_rtx)
5163 || operand == stack_pointer_rtx)
5164 && ! maybe_memory_address_addr_space_p
5165 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5166 {
5167 rtx offset_reg;
5168 enum reg_class cls;
5169
5170 offset_reg = plus_constant (GET_MODE (ad), operand,
5171 INTVAL (XEXP (ad, 1)));
5172
5173 /* Form the adjusted address. */
5174 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5175 ad = gen_rtx_PLUS (GET_MODE (ad),
5176 op_index == 0 ? offset_reg : addend,
5177 op_index == 0 ? addend : offset_reg);
5178 else
5179 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5180 op_index == 0 ? offset_reg : addend,
5181 op_index == 0 ? addend : offset_reg);
5182 *loc = ad;
5183
5184 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5185 find_reloads_address_part (XEXP (ad, op_index),
5186 &XEXP (ad, op_index), cls,
5187 GET_MODE (ad), opnum, type, ind_levels);
5188 find_reloads_address_1 (mode, as,
5189 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5190 GET_CODE (XEXP (ad, op_index)),
5191 &XEXP (ad, 1 - op_index), opnum,
5192 type, 0, insn);
5193
5194 return 0;
5195 }
5196 }
5197
5198 /* See if address becomes valid when an eliminable register
5199 in a sum is replaced. */
5200
5201 tem = ad;
5202 if (GET_CODE (ad) == PLUS)
5203 tem = subst_indexed_address (ad);
5204 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5205 {
5206 /* Ok, we win that way. Replace any additional eliminable
5207 registers. */
5208
5209 subst_reg_equivs_changed = 0;
5210 tem = subst_reg_equivs (tem, insn);
5211
5212 /* Make sure that didn't make the address invalid again. */
5213
5214 if (! subst_reg_equivs_changed
5215 || strict_memory_address_addr_space_p (mode, tem, as))
5216 {
5217 *loc = tem;
5218 return 0;
5219 }
5220 }
5221
5222 /* If constants aren't valid addresses, reload the constant address
5223 into a register. */
5224 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5225 {
5226 machine_mode address_mode = GET_MODE (ad);
5227 if (address_mode == VOIDmode)
5228 address_mode = targetm.addr_space.address_mode (as);
5229
5230 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5231 Unshare it so we can safely alter it. */
5232 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5233 && CONSTANT_POOL_ADDRESS_P (ad))
5234 {
5235 *memrefloc = copy_rtx (*memrefloc);
5236 loc = &XEXP (*memrefloc, 0);
5237 if (removed_and)
5238 loc = &XEXP (*loc, 0);
5239 }
5240
5241 find_reloads_address_part (ad, loc,
5242 base_reg_class (mode, as, MEM, SCRATCH),
5243 address_mode, opnum, type, ind_levels);
5244 return ! removed_and;
5245 }
5246
5247 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5248 opnum, type, ind_levels, insn);
5249 }
5250 \f
5251 /* Find all pseudo regs appearing in AD
5252 that are eliminable in favor of equivalent values
5253 and do not have hard regs; replace them by their equivalents.
5254 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5255 front of it for pseudos that we have to replace with stack slots. */
5256
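/* Illustrative sketch (register numbers invented): if pseudo 150 is
   equivalent to (const_int 64), subst_reg_equivs rewrites an address such
   as (plus:SI (reg:SI fp) (reg:SI 150)) into
   (plus:SI (reg:SI fp) (const_int 64)) and sets subst_reg_equivs_changed
   so that the caller can re-validate the resulting address.  */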
5257 static rtx
5258 subst_reg_equivs (rtx ad, rtx_insn *insn)
5259 {
5260 RTX_CODE code = GET_CODE (ad);
5261 int i;
5262 const char *fmt;
5263
5264 switch (code)
5265 {
5266 case HIGH:
5267 case CONST:
5268 CASE_CONST_ANY:
5269 case SYMBOL_REF:
5270 case LABEL_REF:
5271 case PC:
5272 case CC0:
5273 return ad;
5274
5275 case REG:
5276 {
5277 int regno = REGNO (ad);
5278
5279 if (reg_equiv_constant (regno) != 0)
5280 {
5281 subst_reg_equivs_changed = 1;
5282 return reg_equiv_constant (regno);
5283 }
5284 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5285 {
5286 rtx mem = make_memloc (ad, regno);
5287 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5288 {
5289 subst_reg_equivs_changed = 1;
5290 /* We mark the USE with QImode so that we recognize it
5291 as one that can be safely deleted at the end of
5292 reload. */
5293 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5294 QImode);
5295 return mem;
5296 }
5297 }
5298 }
5299 return ad;
5300
5301 case PLUS:
5302 /* Quickly dispose of a common case. */
5303 if (XEXP (ad, 0) == frame_pointer_rtx
5304 && CONST_INT_P (XEXP (ad, 1)))
5305 return ad;
5306 break;
5307
5308 default:
5309 break;
5310 }
5311
5312 fmt = GET_RTX_FORMAT (code);
5313 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5314 if (fmt[i] == 'e')
5315 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5316 return ad;
5317 }
5318 \f
5319 /* Compute the sum of X and Y, making canonicalizations assumed in an
5320 address, namely: sum constant integers, surround the sum of two
5321 constants with a CONST, put the constant as the second operand, and
5322 group the constant on the outermost sum.
5323
5324 This routine assumes both inputs are already in canonical form. */
5325
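/* Illustrative examples of the canonicalizations described above
   (sketches, not from the original sources):

       form_sum (SImode, (const_int 4), (reg:SI 1))
         ==> (plus:SI (reg:SI 1) (const_int 4))

       form_sum (SImode, (plus:SI (reg:SI 1) (const_int 4)), (const_int 8))
         ==> (plus:SI (reg:SI 1) (const_int 12))

       form_sum (SImode, (symbol_ref:SI "x"), (const_int 8))
         ==> (const:SI (plus:SI (symbol_ref:SI "x") (const_int 8)))  */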
5326 rtx
5327 form_sum (machine_mode mode, rtx x, rtx y)
5328 {
5329 rtx tem;
5330
5331 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5332 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5333
5334 if (CONST_INT_P (x))
5335 return plus_constant (mode, y, INTVAL (x));
5336 else if (CONST_INT_P (y))
5337 return plus_constant (mode, x, INTVAL (y));
5338 else if (CONSTANT_P (x))
5339 tem = x, x = y, y = tem;
5340
5341 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5342 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5343
5344 /* Note that if the operands of Y are specified in the opposite
5345 order in the recursive calls below, infinite recursion will occur. */
5346 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5347 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5348
5349 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5350 constant will have been placed second. */
5351 if (CONSTANT_P (x) && CONSTANT_P (y))
5352 {
5353 if (GET_CODE (x) == CONST)
5354 x = XEXP (x, 0);
5355 if (GET_CODE (y) == CONST)
5356 y = XEXP (y, 0);
5357
5358 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5359 }
5360
5361 return gen_rtx_PLUS (mode, x, y);
5362 }
5363 \f
5364 /* If ADDR is a sum containing a pseudo register that should be
5365 replaced with a constant (from reg_equiv_constant),
5366 return the result of doing so, and also apply the associative
5367 law so that the result is more likely to be a valid address.
5368 (But it is not guaranteed to be one.)
5369
5370 Note that at most one register is replaced, even if more are
5371 replaceable. Also, we try to put the result into a canonical form
5372 so it is more likely to be a valid address.
5373
5374 In all other cases, return ADDR. */
5375
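/* Illustrative worked example (a sketch; register numbers are invented):
   suppose pseudo 100 got no hard register and is equivalent to
   (symbol_ref:SI "x").  Then

       (plus:SI (plus:SI (reg:SI 100) (reg:SI 1)) (const_int 8))

   is rewritten, via form_sum, into roughly

       (plus:SI (reg:SI 1)
                (const:SI (plus:SI (symbol_ref:SI "x") (const_int 8))))

   which has a better chance of being a valid address.  */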
5376 static rtx
5377 subst_indexed_address (rtx addr)
5378 {
5379 rtx op0 = 0, op1 = 0, op2 = 0;
5380 rtx tem;
5381 int regno;
5382
5383 if (GET_CODE (addr) == PLUS)
5384 {
5385 /* Try to find a register to replace. */
5386 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5387 if (REG_P (op0)
5388 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5389 && reg_renumber[regno] < 0
5390 && reg_equiv_constant (regno) != 0)
5391 op0 = reg_equiv_constant (regno);
5392 else if (REG_P (op1)
5393 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5394 && reg_renumber[regno] < 0
5395 && reg_equiv_constant (regno) != 0)
5396 op1 = reg_equiv_constant (regno);
5397 else if (GET_CODE (op0) == PLUS
5398 && (tem = subst_indexed_address (op0)) != op0)
5399 op0 = tem;
5400 else if (GET_CODE (op1) == PLUS
5401 && (tem = subst_indexed_address (op1)) != op1)
5402 op1 = tem;
5403 else
5404 return addr;
5405
5406 /* Pick out up to three things to add. */
5407 if (GET_CODE (op1) == PLUS)
5408 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5409 else if (GET_CODE (op0) == PLUS)
5410 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5411
5412 /* Compute the sum. */
5413 if (op2 != 0)
5414 op1 = form_sum (GET_MODE (addr), op1, op2);
5415 if (op1 != 0)
5416 op0 = form_sum (GET_MODE (addr), op0, op1);
5417
5418 return op0;
5419 }
5420 return addr;
5421 }
5422 \f
5423 /* Update the REG_INC notes for an insn. It updates all REG_INC
5424 notes for the instruction which refer to REGNO so that they refer
5425 to the reload number.
5426
5427 INSN is the insn for which any REG_INC notes need updating.
5428
5429 REGNO is the register number which has been reloaded.
5430
5431 RELOADNUM is the reload number. */
5432
5433 static void
5434 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5435 int reloadnum ATTRIBUTE_UNUSED)
5436 {
5437 if (!AUTO_INC_DEC)
5438 return;
5439
5440 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5441 if (REG_NOTE_KIND (link) == REG_INC
5442 && (int) REGNO (XEXP (link, 0)) == regno)
5443 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5444 }
5445 \f
5446 /* Record the pseudo registers we must reload into hard registers in X,
5447 a subexpression of a would-be memory address referring to a value
5448 in mode MODE. (This function is not called if the address we find
5449 is strictly valid.)
5450
5451 CONTEXT = 1 means we are considering regs as index regs,
5452 = 0 means we are considering them as base regs.
5453 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5454 or an autoinc code.
5455 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5456 is the code of the index part of the address. Otherwise, pass SCRATCH
5457 for this argument.
5458 OPNUM and TYPE specify the purpose of any reloads made.
5459
5460 IND_LEVELS says how many levels of indirect addressing are
5461 supported at this point in the address.
5462
5463 INSN, if nonzero, is the insn in which we do the reload. It is used
5464 to determine if we may generate output reloads.
5465
5466 We return nonzero if X, as a whole, is reloaded or replaced. */
5467
5468 /* Note that we take shortcuts assuming that no multi-reg machine mode
5469 occurs as part of an address.
5470 Also, this is not fully machine-customizable; it works for machines
5471 such as VAXen and 68000's and 32000's, but other possible machines
5472 could have addressing modes that this does not handle right.
5473 If you add push_reload calls here, you need to make sure gen_reload
5474 handles those cases gracefully. */
5475
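/* Illustrative sketch of the CONTEXT distinction above (register numbers
   invented): for an address such as

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   the MULT operand is scanned with CONTEXT == 1, so any reload of
   (reg:SI 100) uses INDEX_REG_CLASS, while (reg:SI 101) is scanned with
   CONTEXT == 0 and reloads into the class returned by base_reg_class.  */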
5476 static int
5477 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5478 rtx x, int context,
5479 enum rtx_code outer_code, enum rtx_code index_code,
5480 rtx *loc, int opnum, enum reload_type type,
5481 int ind_levels, rtx_insn *insn)
5482 {
5483 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5484 ((CONTEXT) == 0 \
5485 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5486 : REGNO_OK_FOR_INDEX_P (REGNO))
5487
5488 enum reg_class context_reg_class;
5489 RTX_CODE code = GET_CODE (x);
5490 bool reloaded_inner_of_autoinc = false;
5491
5492 if (context == 1)
5493 context_reg_class = INDEX_REG_CLASS;
5494 else
5495 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5496
5497 switch (code)
5498 {
5499 case PLUS:
5500 {
5501 rtx orig_op0 = XEXP (x, 0);
5502 rtx orig_op1 = XEXP (x, 1);
5503 RTX_CODE code0 = GET_CODE (orig_op0);
5504 RTX_CODE code1 = GET_CODE (orig_op1);
5505 rtx op0 = orig_op0;
5506 rtx op1 = orig_op1;
5507
5508 if (GET_CODE (op0) == SUBREG)
5509 {
5510 op0 = SUBREG_REG (op0);
5511 code0 = GET_CODE (op0);
5512 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5513 op0 = gen_rtx_REG (word_mode,
5514 (REGNO (op0) +
5515 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5516 GET_MODE (SUBREG_REG (orig_op0)),
5517 SUBREG_BYTE (orig_op0),
5518 GET_MODE (orig_op0))));
5519 }
5520
5521 if (GET_CODE (op1) == SUBREG)
5522 {
5523 op1 = SUBREG_REG (op1);
5524 code1 = GET_CODE (op1);
5525 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5526 /* ??? Why is this given op1's mode, when for op0
5527 ??? SUBREGs above we use word_mode? */
5528 op1 = gen_rtx_REG (GET_MODE (op1),
5529 (REGNO (op1) +
5530 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5531 GET_MODE (SUBREG_REG (orig_op1)),
5532 SUBREG_BYTE (orig_op1),
5533 GET_MODE (orig_op1))));
5534 }
5535 /* A PLUS in the index register can be created only as a result of
5536 register rematerialization for an expression like &localvar*4. Reload it.
5537 It may be possible to combine the displacement at the outer level,
5538 but it is probably not worthwhile to do so. */
5539 if (context == 1)
5540 {
5541 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5542 opnum, ADDR_TYPE (type), ind_levels, insn);
5543 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5544 context_reg_class,
5545 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5546 return 1;
5547 }
5548
5549 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5550 || code0 == ZERO_EXTEND || code1 == MEM)
5551 {
5552 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5553 &XEXP (x, 0), opnum, type, ind_levels,
5554 insn);
5555 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5556 &XEXP (x, 1), opnum, type, ind_levels,
5557 insn);
5558 }
5559
5560 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5561 || code1 == ZERO_EXTEND || code0 == MEM)
5562 {
5563 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5564 &XEXP (x, 0), opnum, type, ind_levels,
5565 insn);
5566 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5567 &XEXP (x, 1), opnum, type, ind_levels,
5568 insn);
5569 }
5570
5571 else if (code0 == CONST_INT || code0 == CONST
5572 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5573 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5574 &XEXP (x, 1), opnum, type, ind_levels,
5575 insn);
5576
5577 else if (code1 == CONST_INT || code1 == CONST
5578 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5579 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5580 &XEXP (x, 0), opnum, type, ind_levels,
5581 insn);
5582
5583 else if (code0 == REG && code1 == REG)
5584 {
5585 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5586 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5587 return 0;
5588 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5589 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5590 return 0;
5591 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5592 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5593 &XEXP (x, 1), opnum, type, ind_levels,
5594 insn);
5595 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5596 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5597 &XEXP (x, 0), opnum, type, ind_levels,
5598 insn);
5599 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5600 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5601 &XEXP (x, 0), opnum, type, ind_levels,
5602 insn);
5603 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5604 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5605 &XEXP (x, 1), opnum, type, ind_levels,
5606 insn);
5607 else
5608 {
5609 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5610 &XEXP (x, 0), opnum, type, ind_levels,
5611 insn);
5612 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5613 &XEXP (x, 1), opnum, type, ind_levels,
5614 insn);
5615 }
5616 }
5617
5618 else if (code0 == REG)
5619 {
5620 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5621 &XEXP (x, 0), opnum, type, ind_levels,
5622 insn);
5623 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5624 &XEXP (x, 1), opnum, type, ind_levels,
5625 insn);
5626 }
5627
5628 else if (code1 == REG)
5629 {
5630 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5631 &XEXP (x, 1), opnum, type, ind_levels,
5632 insn);
5633 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5634 &XEXP (x, 0), opnum, type, ind_levels,
5635 insn);
5636 }
5637 }
5638
5639 return 0;
5640
5641 case POST_MODIFY:
5642 case PRE_MODIFY:
5643 {
5644 rtx op0 = XEXP (x, 0);
5645 rtx op1 = XEXP (x, 1);
5646 enum rtx_code index_code;
5647 int regno;
5648 int reloadnum;
5649
5650 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5651 return 0;
5652
5653 /* Currently, we only support {PRE,POST}_MODIFY constructs
5654 where a base register is {inc,dec}remented by the contents
5655 of another register or by a constant value. Thus, these
5656 operands must match. */
5657 gcc_assert (op0 == XEXP (op1, 0));
5658
5659 /* Require index register (or constant). Let's just handle the
5660 register case in the meantime... If the target allows
5661 auto-modify by a constant then we could try replacing a pseudo
5662 register with its equivalent constant where applicable.
5663
5664 We also handle the case where the register was eliminated
5665 resulting in a PLUS subexpression.
5666
5667 If we later decide to reload the whole PRE_MODIFY or
5668 POST_MODIFY, inc_for_reload might clobber the reload register
5669 before reading the index. The index register might therefore
5670 need to live longer than a TYPE reload normally would, so be
5671 conservative and class it as RELOAD_OTHER. */
5672 if ((REG_P (XEXP (op1, 1))
5673 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5674 || GET_CODE (XEXP (op1, 1)) == PLUS)
5675 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5676 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5677 ind_levels, insn);
5678
5679 gcc_assert (REG_P (XEXP (op1, 0)));
5680
5681 regno = REGNO (XEXP (op1, 0));
5682 index_code = GET_CODE (XEXP (op1, 1));
5683
5684 /* A register that is incremented cannot be constant! */
5685 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5686 || reg_equiv_constant (regno) == 0);
5687
5688 /* Handle a register that is equivalent to a memory location
5689 which cannot be addressed directly. */
5690 if (reg_equiv_memory_loc (regno) != 0
5691 && (reg_equiv_address (regno) != 0
5692 || num_not_at_initial_offset))
5693 {
5694 rtx tem = make_memloc (XEXP (x, 0), regno);
5695
5696 if (reg_equiv_address (regno)
5697 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5698 {
5699 rtx orig = tem;
5700
5701 /* First reload the memory location's address.
5702 We can't use ADDR_TYPE (type) here, because we need to
5703 write back the value after reading it, hence we actually
5704 need two registers. */
5705 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5706 &XEXP (tem, 0), opnum,
5707 RELOAD_OTHER,
5708 ind_levels, insn);
5709
5710 if (!rtx_equal_p (tem, orig))
5711 push_reg_equiv_alt_mem (regno, tem);
5712
5713 /* Then reload the memory location into a base
5714 register. */
5715 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5716 &XEXP (op1, 0),
5717 base_reg_class (mode, as,
5718 code, index_code),
5719 GET_MODE (x), GET_MODE (x), 0,
5720 0, opnum, RELOAD_OTHER);
5721
5722 update_auto_inc_notes (this_insn, regno, reloadnum);
5723 return 0;
5724 }
5725 }
5726
5727 if (reg_renumber[regno] >= 0)
5728 regno = reg_renumber[regno];
5729
5730 /* We require a base register here... */
5731 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5732 {
5733 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5734 &XEXP (op1, 0), &XEXP (x, 0),
5735 base_reg_class (mode, as,
5736 code, index_code),
5737 GET_MODE (x), GET_MODE (x), 0, 0,
5738 opnum, RELOAD_OTHER);
5739
5740 update_auto_inc_notes (this_insn, regno, reloadnum);
5741 return 0;
5742 }
5743 }
5744 return 0;
5745
5746 case POST_INC:
5747 case POST_DEC:
5748 case PRE_INC:
5749 case PRE_DEC:
5750 if (REG_P (XEXP (x, 0)))
5751 {
5752 int regno = REGNO (XEXP (x, 0));
5753 int value = 0;
5754 rtx x_orig = x;
5755
5756 /* A register that is incremented cannot be constant! */
5757 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5758 || reg_equiv_constant (regno) == 0);
5759
5760 /* Handle a register that is equivalent to a memory location
5761 which cannot be addressed directly. */
5762 if (reg_equiv_memory_loc (regno) != 0
5763 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5764 {
5765 rtx tem = make_memloc (XEXP (x, 0), regno);
5766 if (reg_equiv_address (regno)
5767 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5768 {
5769 rtx orig = tem;
5770
5771 /* First reload the memory location's address.
5772 We can't use ADDR_TYPE (type) here, because we need to
5773 write back the value after reading it, hence we actually
5774 need two registers. */
5775 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5776 &XEXP (tem, 0), opnum, type,
5777 ind_levels, insn);
5778 reloaded_inner_of_autoinc = true;
5779 if (!rtx_equal_p (tem, orig))
5780 push_reg_equiv_alt_mem (regno, tem);
5781 /* Put this inside a new increment-expression. */
5782 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5783 /* Proceed to reload that, as if it contained a register. */
5784 }
5785 }
5786
5787 /* If we have a hard register that is ok in this incdec context,
5788 don't make a reload. If the register isn't nice enough for
5789 autoincdec, we can reload it. But if an autoincrement of a
5790 register that we have verified here as acceptable is still not
5791 "valid" in the surrounding context, it must be that no
5792 autoincrement is "valid". If that is true and something made an
5793 autoincrement anyway, this must be a special context where one is
5794 allowed. (For example, a "push" instruction.)
5795 We can't improve this address, so leave it alone. */
5796
5797 /* Otherwise, reload the autoincrement into a suitable hard reg
5798 and record how much to increment by. */
5799
5800 if (reg_renumber[regno] >= 0)
5801 regno = reg_renumber[regno];
5802 if (regno >= FIRST_PSEUDO_REGISTER
5803 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5804 index_code))
5805 {
5806 int reloadnum;
5807
5808 /* If we can output the register afterwards, do so, this
5809 saves the extra update.
5810 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5811 CALL_INSN - and it does not set CC0.
5812 But don't do this if we cannot directly address the
5813 memory location, since this will make it harder to
5814 reuse address reloads, and increases register pressure.
5815 Also don't do this if we can probably update x directly. */
5816 rtx equiv = (MEM_P (XEXP (x, 0))
5817 ? XEXP (x, 0)
5818 : reg_equiv_mem (regno));
5819 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5820 if (insn && NONJUMP_INSN_P (insn) && equiv
5821 && memory_operand (equiv, GET_MODE (equiv))
5822 #if HAVE_cc0
5823 && ! sets_cc0_p (PATTERN (insn))
5824 #endif
5825 && ! (icode != CODE_FOR_nothing
5826 && insn_operand_matches (icode, 0, equiv)
5827 && insn_operand_matches (icode, 1, equiv))
5828 /* Using RELOAD_OTHER means we emit this and the reload we
5829 made earlier in the wrong order. */
5830 && !reloaded_inner_of_autoinc)
5831 {
5832 /* We use the original pseudo for loc, so that
5833 emit_reload_insns() knows which pseudo this
5834 reload refers to and updates the pseudo rtx, not
5835 its equivalent memory location, as well as the
5836 corresponding entry in reg_last_reload_reg. */
5837 loc = &XEXP (x_orig, 0);
5838 x = XEXP (x, 0);
5839 reloadnum
5840 = push_reload (x, x, loc, loc,
5841 context_reg_class,
5842 GET_MODE (x), GET_MODE (x), 0, 0,
5843 opnum, RELOAD_OTHER);
5844 }
5845 else
5846 {
5847 reloadnum
5848 = push_reload (x, x, loc, (rtx*) 0,
5849 context_reg_class,
5850 GET_MODE (x), GET_MODE (x), 0, 0,
5851 opnum, type);
5852 rld[reloadnum].inc
5853 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5854
5855 value = 1;
5856 }
5857
5858 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5859 reloadnum);
5860 }
5861 return value;
5862 }
5863 return 0;
5864
5865 case TRUNCATE:
5866 case SIGN_EXTEND:
5867 case ZERO_EXTEND:
5868 /* Look for parts to reload in the inner expression and reload them
5869 too, in addition to this operation. Reloading all inner parts in
5870 addition to this one shouldn't be necessary, but at this point,
5871 we don't know if we can possibly omit any part that *can* be
5872 reloaded. Targets that are better off reloading just either part
5873 (or perhaps even a different part of an outer expression), should
5874 define LEGITIMIZE_RELOAD_ADDRESS. */
5875 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5876 context, code, SCRATCH, &XEXP (x, 0), opnum,
5877 type, ind_levels, insn);
5878 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5879 context_reg_class,
5880 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5881 return 1;
5882
5883 case MEM:
5884 /* This is probably the result of a substitution, by eliminate_regs, of
5885 an equivalent address for a pseudo that was not allocated to a hard
5886 register. Verify that the specified address is valid and reload it
5887 into a register.
5888
5889 Since we know we are going to reload this item, don't decrement for
5890 the indirection level.
5891
5892 Note that this is actually conservative: it would be slightly more
5893 efficient to use the value of SPILL_INDIRECT_LEVELS from
5894 reload1.c here. */
5895
5896 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5897 opnum, ADDR_TYPE (type), ind_levels, insn);
5898 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5899 context_reg_class,
5900 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5901 return 1;
5902
5903 case REG:
5904 {
5905 int regno = REGNO (x);
5906
5907 if (reg_equiv_constant (regno) != 0)
5908 {
5909 find_reloads_address_part (reg_equiv_constant (regno), loc,
5910 context_reg_class,
5911 GET_MODE (x), opnum, type, ind_levels);
5912 return 1;
5913 }
5914
5915 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5916 that feeds this insn. */
5917 if (reg_equiv_mem (regno) != 0)
5918 {
5919 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5920 context_reg_class,
5921 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5922 return 1;
5923 }
5924 #endif
5925
5926 if (reg_equiv_memory_loc (regno)
5927 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5928 {
5929 rtx tem = make_memloc (x, regno);
5930 if (reg_equiv_address (regno) != 0
5931 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5932 {
5933 x = tem;
5934 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5935 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5936 ind_levels, insn);
5937 if (!rtx_equal_p (x, tem))
5938 push_reg_equiv_alt_mem (regno, x);
5939 }
5940 }
5941
5942 if (reg_renumber[regno] >= 0)
5943 regno = reg_renumber[regno];
5944
5945 if (regno >= FIRST_PSEUDO_REGISTER
5946 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5947 index_code))
5948 {
5949 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5950 context_reg_class,
5951 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5952 return 1;
5953 }
5954
5955 /* If a register appearing in an address is the subject of a CLOBBER
5956 in this insn, reload it into some other register to be safe.
5957 The CLOBBER is supposed to make the register unavailable
5958 from before this insn to after it. */
5959 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5960 {
5961 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5962 context_reg_class,
5963 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5964 return 1;
5965 }
5966 }
5967 return 0;
5968
5969 case SUBREG:
5970 if (REG_P (SUBREG_REG (x)))
5971 {
5972 /* If this is a SUBREG of a hard register and the resulting register
5973 is of the wrong class, reload the whole SUBREG. This avoids
5974 needless copies if SUBREG_REG is multi-word. */
5975 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5976 {
5977 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5978
5979 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5980 index_code))
5981 {
5982 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5983 context_reg_class,
5984 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5985 return 1;
5986 }
5987 }
5988 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5989 is larger than the class size, then reload the whole SUBREG. */
5990 else
5991 {
5992 enum reg_class rclass = context_reg_class;
5993 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5994 > reg_class_size[(int) rclass])
5995 {
5996 /* If the inner register will be replaced by a memory
5997 reference, we can do this only if we can replace the
5998 whole subreg by a (narrower) memory reference. If
5999 this is not possible, fall through and reload just
6000 the inner register (including address reloads). */
6001 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6002 {
6003 rtx tem = find_reloads_subreg_address (x, opnum,
6004 ADDR_TYPE (type),
6005 ind_levels, insn,
6006 NULL);
6007 if (tem)
6008 {
6009 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6010 GET_MODE (tem), VOIDmode, 0, 0,
6011 opnum, type);
6012 return 1;
6013 }
6014 }
6015 else
6016 {
6017 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6018 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6019 return 1;
6020 }
6021 }
6022 }
6023 }
6024 break;
6025
6026 default:
6027 break;
6028 }
6029
6030 {
6031 const char *fmt = GET_RTX_FORMAT (code);
6032 int i;
6033
6034 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6035 {
6036 if (fmt[i] == 'e')
6037 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6038 we get here. */
6039 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6040 code, SCRATCH, &XEXP (x, i),
6041 opnum, type, ind_levels, insn);
6042 }
6043 }
6044
6045 #undef REG_OK_FOR_CONTEXT
6046 return 0;
6047 }
6048 \f
6049 /* X, which is found at *LOC, is a part of an address that needs to be
6050 reloaded into a register of class RCLASS. If X is a constant, or if
6051 X is a PLUS that contains a constant, check that the constant is a
6052 legitimate operand and that we are supposed to be able to load
6053 it into the register.
6054
6055 If not, force the constant into memory and reload the MEM instead.
6056
6057 MODE is the mode to use, in case X is an integer constant.
6058
6059 OPNUM and TYPE describe the purpose of any reloads made.
6060
6061 IND_LEVELS says how many levels of indirect addressing this machine
6062 supports. */
6063
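/* Illustrative sketch (whether a given constant is legitimate is of course
   target-specific): if X is (const_int 0x12345678) and the target reports
   it as not being a legitimate constant for MODE, the constant is spilled
   to the literal pool with force_const_mem, its address is processed by
   find_reloads_address, and the resulting MEM is what gets reloaded into
   RCLASS.  */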
6064 static void
6065 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6066 machine_mode mode, int opnum,
6067 enum reload_type type, int ind_levels)
6068 {
6069 if (CONSTANT_P (x)
6070 && (!targetm.legitimate_constant_p (mode, x)
6071 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6072 {
6073 x = force_const_mem (mode, x);
6074 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6075 opnum, type, ind_levels, 0);
6076 }
6077
6078 else if (GET_CODE (x) == PLUS
6079 && CONSTANT_P (XEXP (x, 1))
6080 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6081 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6082 == NO_REGS))
6083 {
6084 rtx tem;
6085
6086 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6087 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6088 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6089 opnum, type, ind_levels, 0);
6090 }
6091
6092 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6093 mode, VOIDmode, 0, 0, opnum, type);
6094 }
6095 \f
6096 /* X, a subreg of a pseudo, is a part of an address that needs to be
6097 reloaded, and the pseudo is equivalent to a memory location.
6098
6099 Attempt to replace the whole subreg by a (possibly narrower or wider)
6100 memory reference. If this is possible, return this new memory
6101 reference, and push all required address reloads. Otherwise,
6102 return NULL.
6103
6104 OPNUM and TYPE identify the purpose of the reload.
6105
6106 IND_LEVELS says how many levels of indirect addressing are
6107 supported at this point in the address.
6108
6109 INSN, if nonzero, is the insn in which we do the reload. It is used
6110 to determine where to put USEs for pseudos that we have to replace with
6111 stack slots. */
6112
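/* Illustrative sketch (the frame offsets are invented): if pseudo 400 is
   equivalent to (mem:SI (plus:SI (reg:SI fp) (const_int -16))), then

       (subreg:HI (reg:SI 400) 2)

   can be replaced by the narrower reference

       (mem:HI (plus:SI (reg:SI fp) (const_int -14)))

   with any reloads needed for that address pushed here.  A paradoxical
   subreg of the same pseudo would instead make this function return
   NULL.  */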
6113 static rtx
6114 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6115 int ind_levels, rtx_insn *insn,
6116 int *address_reloaded)
6117 {
6118 machine_mode outer_mode = GET_MODE (x);
6119 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6120 int regno = REGNO (SUBREG_REG (x));
6121 int reloaded = 0;
6122 rtx tem, orig;
6123 int offset;
6124
6125 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6126
6127 /* We cannot replace the subreg with a modified memory reference if:
6128
6129 - we have a paradoxical subreg that implicitly acts as a zero or
6130 sign extension operation due to LOAD_EXTEND_OP;
6131
6132 - we have a subreg that is implicitly supposed to act on the full
6133 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6134
6135 - the address of the equivalent memory location is mode-dependent; or
6136
6137 - we have a paradoxical subreg and the resulting memory is not
6138 sufficiently aligned to allow access in the wider mode.
6139
6140 In addition, we choose not to perform the replacement for *any*
6141 paradoxical subreg, even if it were possible in principle. This
6142 is to avoid generating wider memory references than necessary.
6143
6144 This corresponds to how previous versions of reload used to handle
6145 paradoxical subregs where no address reload was required. */
6146
6147 if (paradoxical_subreg_p (x))
6148 return NULL;
6149
6150 if (WORD_REGISTER_OPERATIONS
6151 && GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
6152 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6153 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6154 return NULL;
6155
6156 /* Since we don't attempt to handle paradoxical subregs, we can just
6157 call into simplify_subreg, which will handle all remaining checks
6158 for us. */
6159 orig = make_memloc (SUBREG_REG (x), regno);
6160 offset = SUBREG_BYTE (x);
6161 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6162 if (!tem || !MEM_P (tem))
6163 return NULL;
6164
6165 /* Now push all required address reloads, if any. */
6166 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6167 XEXP (tem, 0), &XEXP (tem, 0),
6168 opnum, type, ind_levels, insn);
6169 /* ??? Do we need to handle nonzero offsets somehow? */
6170 if (!offset && !rtx_equal_p (tem, orig))
6171 push_reg_equiv_alt_mem (regno, tem);
6172
6173 /* For some processors an address may be valid in the original mode but
6174 not in a smaller mode. For example, ARM accepts a scaled index register
6175 in SImode but not in HImode. Note that this is only a problem if the
6176 address in reg_equiv_mem is already invalid in the new mode; other
6177 cases would be fixed by find_reloads_address as usual.
6178
6179 ??? We attempt to handle such cases here by doing an additional reload
6180 of the full address after the usual processing by find_reloads_address.
6181 Note that this may not work in the general case, but it seems to cover
6182 the cases where this situation currently occurs. A more general fix
6183 might be to reload the *value* instead of the address, but this would
6184 not be expected by the callers of this routine as-is.
6185
6186 If find_reloads_address has already completely replaced the address, there
6187 is nothing further to do. */
6188 if (reloaded == 0
6189 && reg_equiv_mem (regno) != 0
6190 && !strict_memory_address_addr_space_p
6191 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6192 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6193 {
6194 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6195 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6196 MEM, SCRATCH),
6197 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6198 reloaded = 1;
6199 }
6200
6201 /* If this is not a toplevel operand, find_reloads doesn't see this
6202 substitution. We have to emit a USE of the pseudo so that
6203 delete_output_reload can see it. */
6204 if (replace_reloads && recog_data.operand[opnum] != x)
6205 /* We mark the USE with QImode so that we recognize it as one that
6206 can be safely deleted at the end of reload. */
6207 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6208 QImode);
6209
6210 if (address_reloaded)
6211 *address_reloaded = reloaded;
6212
6213 return tem;
6214 }
6215 \f
6216 /* Substitute into the current INSN the registers into which we have reloaded
6217 the things that need reloading. The array `replacements'
6218 contains the locations of all pointers that must be changed
6219 and says what to replace them with.
6220
6221 The replacements are made in place within INSN. */
6222
6223 void
6224 subst_reloads (rtx_insn *insn)
6225 {
6226 int i;
6227
6228 for (i = 0; i < n_replacements; i++)
6229 {
6230 struct replacement *r = &replacements[i];
6231 rtx reloadreg = rld[r->what].reg_rtx;
6232 if (reloadreg)
6233 {
6234 #ifdef DEBUG_RELOAD
6235 /* This checking takes a very long time on some platforms
6236 causing the gcc.c-torture/compile/limits-fnargs.c test
6237 to time out during testing. See PR 31850.
6238
6239 Internal consistency test. Check that we don't modify
6240 anything in the equivalence arrays. Whenever something from
6241 those arrays needs to be reloaded, it must be unshared before
6242 being substituted into; the equivalence must not be modified.
6243 Otherwise, if the equivalence is used after that, it will
6244 have been modified, and the thing substituted (probably a
6245 register) is likely overwritten and not a usable equivalence. */
6246 int check_regno;
6247
6248 for (check_regno = 0; check_regno < max_regno; check_regno++)
6249 {
6250 #define CHECK_MODF(ARRAY) \
6251 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6252 || !loc_mentioned_in_p (r->where, \
6253 (*reg_equivs)[check_regno].ARRAY))
6254
6255 CHECK_MODF (constant);
6256 CHECK_MODF (memory_loc);
6257 CHECK_MODF (address);
6258 CHECK_MODF (mem);
6259 #undef CHECK_MODF
6260 }
6261 #endif /* DEBUG_RELOAD */
6262
6263 /* If we're replacing a LABEL_REF with a register, there must
6264 already be an indication (to e.g. flow) which label this
6265 register refers to. */
6266 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6267 || !JUMP_P (insn)
6268 || find_reg_note (insn,
6269 REG_LABEL_OPERAND,
6270 XEXP (*r->where, 0))
6271 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6272
6273 /* Encapsulate RELOADREG so its machine mode matches what
6274 used to be there. Note that gen_lowpart_common will
6275 do the wrong thing if RELOADREG is multi-word. RELOADREG
6276 will always be a REG here. */
6277 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6278 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6279
6280 *r->where = reloadreg;
6281 }
6282 /* If reload got no reg and isn't optional, something's wrong. */
6283 else
6284 gcc_assert (rld[r->what].optional);
6285 }
6286 }
6287 \f
6288 /* Make a copy of any replacements being done into X and move those
6289 copies to locations in Y, a copy of X. */
6290
6291 void
6292 copy_replacements (rtx x, rtx y)
6293 {
6294 copy_replacements_1 (&x, &y, n_replacements);
6295 }
6296
6297 static void
6298 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6299 {
6300 int i, j;
6301 rtx x, y;
6302 struct replacement *r;
6303 enum rtx_code code;
6304 const char *fmt;
6305
6306 for (j = 0; j < orig_replacements; j++)
6307 if (replacements[j].where == px)
6308 {
6309 r = &replacements[n_replacements++];
6310 r->where = py;
6311 r->what = replacements[j].what;
6312 r->mode = replacements[j].mode;
6313 }
6314
6315 x = *px;
6316 y = *py;
6317 code = GET_CODE (x);
6318 fmt = GET_RTX_FORMAT (code);
6319
6320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6321 {
6322 if (fmt[i] == 'e')
6323 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6324 else if (fmt[i] == 'E')
6325 for (j = XVECLEN (x, i); --j >= 0; )
6326 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6327 orig_replacements);
6328 }
6329 }
6330
6331 /* Change any replacements being done to *X to be done to *Y. */
6332
6333 void
6334 move_replacements (rtx *x, rtx *y)
6335 {
6336 int i;
6337
6338 for (i = 0; i < n_replacements; i++)
6339 if (replacements[i].where == x)
6340 replacements[i].where = y;
6341 }
6342 \f
6343 /* If LOC was scheduled to be replaced by something, return the replacement.
6344 Otherwise, return *LOC. */
6345
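/* Illustrative sketch (the hard register number is invented): if *LOC is
   (plus:SI (reg:SI 123) (const_int 4)) and a replacement has been
   scheduled for &XEXP (*LOC, 0) whose reload was assigned hard register 2,
   this returns a fresh

       (plus:SI (reg:SI 2) (const_int 4))

   while leaving *LOC itself untouched until subst_reloads runs.  */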
6346 rtx
6347 find_replacement (rtx *loc)
6348 {
6349 struct replacement *r;
6350
6351 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6352 {
6353 rtx reloadreg = rld[r->what].reg_rtx;
6354
6355 if (reloadreg && r->where == loc)
6356 {
6357 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6358 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6359
6360 return reloadreg;
6361 }
6362 else if (reloadreg && GET_CODE (*loc) == SUBREG
6363 && r->where == &SUBREG_REG (*loc))
6364 {
6365 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6366 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6367
6368 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6369 GET_MODE (SUBREG_REG (*loc)),
6370 SUBREG_BYTE (*loc));
6371 }
6372 }
6373
6374 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6375 what's inside and make a new rtl if so. */
6376 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6377 || GET_CODE (*loc) == MULT)
6378 {
6379 rtx x = find_replacement (&XEXP (*loc, 0));
6380 rtx y = find_replacement (&XEXP (*loc, 1));
6381
6382 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6383 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6384 }
6385
6386 return *loc;
6387 }
6388 \f
6389 /* Return nonzero if a register in the range [REGNO, ENDREGNO)
6390 appears either explicitly or implicitly in X
6391 other than being stored into (stores into earlyclobber operands do count).
6392
6393 References contained within the substructure at LOC do not count.
6394 LOC may be zero, meaning don't ignore anything.
6395
6396 This is similar to refers_to_regno_p in rtlanal.c except that we
6397 look at equivalences for pseudos that didn't get hard registers. */
6398
6399 static int
6400 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6401 rtx x, rtx *loc)
6402 {
6403 int i;
6404 unsigned int r;
6405 RTX_CODE code;
6406 const char *fmt;
6407
6408 if (x == 0)
6409 return 0;
6410
6411 repeat:
6412 code = GET_CODE (x);
6413
6414 switch (code)
6415 {
6416 case REG:
6417 r = REGNO (x);
6418
6419 /* If this is a pseudo, a hard register must not have been allocated.
6420 X must therefore either be a constant or be in memory. */
6421 if (r >= FIRST_PSEUDO_REGISTER)
6422 {
6423 if (reg_equiv_memory_loc (r))
6424 return refers_to_regno_for_reload_p (regno, endregno,
6425 reg_equiv_memory_loc (r),
6426 (rtx*) 0);
6427
6428 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6429 return 0;
6430 }
6431
6432 return (endregno > r
6433 && regno < r + (r < FIRST_PSEUDO_REGISTER
6434 ? hard_regno_nregs[r][GET_MODE (x)]
6435 : 1));
6436
6437 case SUBREG:
6438 /* If this is a SUBREG of a hard reg, we can see exactly which
6439 registers are being modified. Otherwise, handle normally. */
6440 if (REG_P (SUBREG_REG (x))
6441 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6442 {
6443 unsigned int inner_regno = subreg_regno (x);
6444 unsigned int inner_endregno
6445 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6446 ? subreg_nregs (x) : 1);
6447
6448 return endregno > inner_regno && regno < inner_endregno;
6449 }
6450 break;
6451
6452 case CLOBBER:
6453 case SET:
6454 if (&SET_DEST (x) != loc
6455 /* Note that setting a SUBREG counts as referring to the REG it is in
6456 for a pseudo, but not for hard registers, since we can treat
6457 each word individually. */
6458 && ((GET_CODE (SET_DEST (x)) == SUBREG
6459 && loc != &SUBREG_REG (SET_DEST (x))
6460 && REG_P (SUBREG_REG (SET_DEST (x)))
6461 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6462 && refers_to_regno_for_reload_p (regno, endregno,
6463 SUBREG_REG (SET_DEST (x)),
6464 loc))
6465 /* If the output is an earlyclobber operand, this is
6466 a conflict. */
6467 || ((!REG_P (SET_DEST (x))
6468 || earlyclobber_operand_p (SET_DEST (x)))
6469 && refers_to_regno_for_reload_p (regno, endregno,
6470 SET_DEST (x), loc))))
6471 return 1;
6472
6473 if (code == CLOBBER || loc == &SET_SRC (x))
6474 return 0;
6475 x = SET_SRC (x);
6476 goto repeat;
6477
6478 default:
6479 break;
6480 }
6481
6482 /* X does not match, so try its subexpressions. */
6483
6484 fmt = GET_RTX_FORMAT (code);
6485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6486 {
6487 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6488 {
6489 if (i == 0)
6490 {
6491 x = XEXP (x, 0);
6492 goto repeat;
6493 }
6494 else
6495 if (refers_to_regno_for_reload_p (regno, endregno,
6496 XEXP (x, i), loc))
6497 return 1;
6498 }
6499 else if (fmt[i] == 'E')
6500 {
6501 int j;
6502 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6503 if (loc != &XVECEXP (x, i, j)
6504 && refers_to_regno_for_reload_p (regno, endregno,
6505 XVECEXP (x, i, j), loc))
6506 return 1;
6507 }
6508 }
6509 return 0;
6510 }
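/* A minimal sketch of the half-open [REGNO, ENDREGNO) convention, using a
   hypothetical wrapper (which would have to live in this file, since the
   function above is static).  */
#if 0
static int
hard_reg_referenced_for_reload_p (unsigned int r, machine_mode m, rtx pat)
{
  return refers_to_regno_for_reload_p (r, end_hard_regno (m, r),
				       pat, (rtx *) 0);
}
#endif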
6511
6512 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6513 we check if any register number in X conflicts with the relevant register
6514 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6515 contains a MEM (we don't bother checking for memory addresses that can't
6516 conflict, because we expect this to be a rare case).
6517
6518 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6519 that we look at equivalences for pseudos that didn't get hard registers. */
6520
6521 int
6522 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6523 {
6524 int regno, endregno;
6525
6526 /* Overly conservative. */
6527 if (GET_CODE (x) == STRICT_LOW_PART
6528 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6529 x = XEXP (x, 0);
6530
6531 /* If either argument is a constant, then modifying X cannot affect IN. */
6532 if (CONSTANT_P (x) || CONSTANT_P (in))
6533 return 0;
6534 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6535 return refers_to_mem_for_reload_p (in);
6536 else if (GET_CODE (x) == SUBREG)
6537 {
6538 regno = REGNO (SUBREG_REG (x));
6539 if (regno < FIRST_PSEUDO_REGISTER)
6540 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6541 GET_MODE (SUBREG_REG (x)),
6542 SUBREG_BYTE (x),
6543 GET_MODE (x));
6544 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6545 ? subreg_nregs (x) : 1);
6546
6547 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6548 }
6549 else if (REG_P (x))
6550 {
6551 regno = REGNO (x);
6552
6553 /* If this is a pseudo, it must not have been assigned a hard register.
6554 Therefore, it must either be in memory or be a constant. */
6555
6556 if (regno >= FIRST_PSEUDO_REGISTER)
6557 {
6558 if (reg_equiv_memory_loc (regno))
6559 return refers_to_mem_for_reload_p (in);
6560 gcc_assert (reg_equiv_constant (regno));
6561 return 0;
6562 }
6563
6564 endregno = END_REGNO (x);
6565
6566 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6567 }
6568 else if (MEM_P (x))
6569 return refers_to_mem_for_reload_p (in);
6570 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6571 || GET_CODE (x) == CC0)
6572 return reg_mentioned_p (x, in);
6573 else
6574 {
6575 gcc_assert (GET_CODE (x) == PLUS);
6576
6577 /* We actually want to know if X is mentioned somewhere inside IN.
6578 We must not say that (plus (sp) (const_int 124)) is in
6579 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6580 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6581 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6582 while (MEM_P (in))
6583 in = XEXP (in, 0);
6584 if (REG_P (in))
6585 return 0;
6586 else if (GET_CODE (in) == PLUS)
6587 return (rtx_equal_p (x, in)
6588 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6589 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6590 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6591 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6592 }
6593
6594 gcc_unreachable ();
6595 }
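/* A minimal usage sketch, with hypothetical variable names, mirroring the
   calls made from find_equiv_reg below: before reusing a register whose
   value might be changed by this insn, ask whether writing DEST can alter
   GOAL.  */
#if 0
  if (reg_overlap_mentioned_for_reload_p (dest, goal))
    return 0;	/* DEST may share a hard reg, or memory, with GOAL.  */
#endif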
6596
6597 /* Return nonzero if anything in X contains a MEM. Pseudo registers whose
6598 equivalent values live in memory count as MEMs here. */
6599
6600 static int
6601 refers_to_mem_for_reload_p (rtx x)
6602 {
6603 const char *fmt;
6604 int i;
6605
6606 if (MEM_P (x))
6607 return 1;
6608
6609 if (REG_P (x))
6610 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6611 && reg_equiv_memory_loc (REGNO (x)));
6612
6613 fmt = GET_RTX_FORMAT (GET_CODE (x));
6614 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6615 if (fmt[i] == 'e'
6616 && (MEM_P (XEXP (x, i))
6617 || refers_to_mem_for_reload_p (XEXP (x, i))))
6618 return 1;
6619
6620 return 0;
6621 }
6622 \f
6623 /* Check the insns before INSN to see if there is a suitable register
6624 containing the same value as GOAL.
6625 If OTHER is -1, look for a register in class RCLASS.
6626 Otherwise, just see if register number OTHER shares GOAL's value.
6627
6628 Return an rtx for the register found, or zero if none is found.
6629
6630 If RELOAD_REG_P is (short *)1,
6631 we reject any hard reg that appears in reload_reg_rtx
6632 because such a hard reg is also needed coming into this insn.
6633
6634 If RELOAD_REG_P is any other nonzero value,
6635 it is a vector indexed by hard reg number
6636 and we reject any hard reg whose element in the vector is nonnegative
6637 as well as any that appears in reload_reg_rtx.
6638
6639 If GOAL is zero, then GOALREG is a register number; we look
6640 for an equivalent for that register.
6641
6642 MODE is the machine mode of the value we want an equivalence for.
6643 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6644
6645 This function is used by jump.c as well as in the reload pass.
6646
6647 If GOAL is the sum of the stack pointer and a constant, we treat it
6648 as if it were a constant except that sp is required to be unchanging. */
6649
6650 rtx
6651 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6652 short *reload_reg_p, int goalreg, machine_mode mode)
6653 {
6654 rtx_insn *p = insn;
6655 rtx goaltry, valtry, value;
6656 rtx_insn *where;
6657 rtx pat;
6658 int regno = -1;
6659 int valueno;
6660 int goal_mem = 0;
6661 int goal_const = 0;
6662 int goal_mem_addr_varies = 0;
6663 int need_stable_sp = 0;
6664 int nregs;
6665 int valuenregs;
6666 int num = 0;
6667
6668 if (goal == 0)
6669 regno = goalreg;
6670 else if (REG_P (goal))
6671 regno = REGNO (goal);
6672 else if (MEM_P (goal))
6673 {
6674 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6675 if (MEM_VOLATILE_P (goal))
6676 return 0;
6677 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6678 return 0;
6679 /* An address with side effects must be reexecuted. */
6680 switch (code)
6681 {
6682 case POST_INC:
6683 case PRE_INC:
6684 case POST_DEC:
6685 case PRE_DEC:
6686 case POST_MODIFY:
6687 case PRE_MODIFY:
6688 return 0;
6689 default:
6690 break;
6691 }
6692 goal_mem = 1;
6693 }
6694 else if (CONSTANT_P (goal))
6695 goal_const = 1;
6696 else if (GET_CODE (goal) == PLUS
6697 && XEXP (goal, 0) == stack_pointer_rtx
6698 && CONSTANT_P (XEXP (goal, 1)))
6699 goal_const = need_stable_sp = 1;
6700 else if (GET_CODE (goal) == PLUS
6701 && XEXP (goal, 0) == frame_pointer_rtx
6702 && CONSTANT_P (XEXP (goal, 1)))
6703 goal_const = 1;
6704 else
6705 return 0;
6706
6707 num = 0;
6708 /* Scan insns back from INSN, looking for one that copies
6709 a value into or out of GOAL.
6710 Stop and give up if we reach a label. */
6711
6712 while (1)
6713 {
6714 p = PREV_INSN (p);
6715 if (p && DEBUG_INSN_P (p))
6716 continue;
6717 num++;
6718 if (p == 0 || LABEL_P (p)
6719 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6720 return 0;
6721
6722 /* Don't reuse register contents from before a setjmp-type
6723 function call; on the second return (from the longjmp) it
6724 might have been clobbered by a later reuse. It doesn't
6725 seem worthwhile to check whether it really is reused, even
6726 if that information were readily available; just don't
6727 reuse it across the setjmp call. */
6728 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6729 return 0;
6730
6731 if (NONJUMP_INSN_P (p)
6732 /* If we don't want spill regs ... */
6733 && (! (reload_reg_p != 0
6734 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6735 /* ... then ignore insns introduced by reload; they aren't
6736 useful and can cause results in reload_as_needed to be
6737 different from what they were when calculating the need for
6738 spills. If we notice an input-reload insn here, we will
6739 reject it below, but it might hide a usable equivalent.
6740 That makes bad code. It may even fail: perhaps no reg was
6741 spilled for this insn because it was assumed we would find
6742 that equivalent. */
6743 || INSN_UID (p) < reload_first_uid))
6744 {
6745 rtx tem;
6746 pat = single_set (p);
6747
6748 /* First check for something that sets some reg equal to GOAL. */
6749 if (pat != 0
6750 && ((regno >= 0
6751 && true_regnum (SET_SRC (pat)) == regno
6752 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6753 ||
6754 (regno >= 0
6755 && true_regnum (SET_DEST (pat)) == regno
6756 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6757 ||
6758 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6759 /* When looking for stack pointer + const,
6760 make sure we don't use a stack adjust. */
6761 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6762 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6763 || (goal_mem
6764 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6765 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6766 || (goal_mem
6767 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6768 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6769 /* If we are looking for a constant,
6770 and something equivalent to that constant was copied
6771 into a reg, we can use that reg. */
6772 || (goal_const && REG_NOTES (p) != 0
6773 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6774 && ((rtx_equal_p (XEXP (tem, 0), goal)
6775 && (valueno
6776 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6777 || (REG_P (SET_DEST (pat))
6778 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6779 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6780 && CONST_INT_P (goal)
6781 && 0 != (goaltry
6782 = operand_subword (XEXP (tem, 0), 0, 0,
6783 VOIDmode))
6784 && rtx_equal_p (goal, goaltry)
6785 && (valtry
6786 = operand_subword (SET_DEST (pat), 0, 0,
6787 VOIDmode))
6788 && (valueno = true_regnum (valtry)) >= 0)))
6789 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6790 NULL_RTX))
6791 && REG_P (SET_DEST (pat))
6792 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6793 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6794 && CONST_INT_P (goal)
6795 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6796 VOIDmode))
6797 && rtx_equal_p (goal, goaltry)
6798 && (valtry
6799 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6800 && (valueno = true_regnum (valtry)) >= 0)))
6801 {
6802 if (other >= 0)
6803 {
6804 if (valueno != other)
6805 continue;
6806 }
6807 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6808 continue;
6809 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6810 mode, valueno))
6811 continue;
6812 value = valtry;
6813 where = p;
6814 break;
6815 }
6816 }
6817 }
6818
6819 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6820 (or copying VALUE into GOAL, if GOAL is also a register).
6821 Now verify that VALUE is really valid. */
6822
6823 /* VALUENO is the register number of VALUE; a hard register. */
6824
6825 /* Don't try to re-use something that is killed in this insn. We want
6826 to be able to trust REG_UNUSED notes. */
6827 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6828 return 0;
6829
6830 /* If we propose to get the value from the stack pointer or if GOAL is
6831 a MEM based on the stack pointer, we need a stable SP. */
6832 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6833 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6834 goal)))
6835 need_stable_sp = 1;
6836
6837 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6838 if (GET_MODE (value) != mode)
6839 return 0;
6840
6841 /* Reject VALUE if it was loaded from GOAL
6842 and is also a register that appears in the address of GOAL. */
6843
6844 if (goal_mem && value == SET_DEST (single_set (where))
6845 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6846 goal, (rtx*) 0))
6847 return 0;
6848
6849 /* Reject registers that overlap GOAL. */
6850
6851 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6852 nregs = hard_regno_nregs[regno][mode];
6853 else
6854 nregs = 1;
6855 valuenregs = hard_regno_nregs[valueno][mode];
6856
6857 if (!goal_mem && !goal_const
6858 && regno + nregs > valueno && regno < valueno + valuenregs)
6859 return 0;
6860
6861 /* Reject VALUE if it is one of the regs reserved for reloads.
6862 Reload1 knows how to reuse them anyway, and it would get
6863 confused if we allocated one without its knowledge.
6864 (Now that insns introduced by reload are ignored above,
6865 this case shouldn't happen, but I'm not positive.) */
6866
6867 if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1)
6868 {
6869 int i;
6870 for (i = 0; i < valuenregs; ++i)
6871 if (reload_reg_p[valueno + i] >= 0)
6872 return 0;
6873 }
6874
6875 /* Reject VALUE if it is a register being used for an input reload
6876 even if it is not one of those reserved. */
6877
6878 if (reload_reg_p != 0)
6879 {
6880 int i;
6881 for (i = 0; i < n_reloads; i++)
6882 if (rld[i].reg_rtx != 0 && rld[i].in)
6883 {
6884 int regno1 = REGNO (rld[i].reg_rtx);
6885 int nregs1 = hard_regno_nregs[regno1]
6886 [GET_MODE (rld[i].reg_rtx)];
6887 if (regno1 < valueno + valuenregs
6888 && regno1 + nregs1 > valueno)
6889 return 0;
6890 }
6891 }
6892
6893 if (goal_mem)
6894 /* We must treat the frame pointer as varying here, since it can vary,
6895 for example in a nonlocal goto as generated by expand_goto. */
6896 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6897
6898 /* Now verify that the values of GOAL and VALUE remain unaltered
6899 until INSN is reached. */
6900
6901 p = insn;
6902 while (1)
6903 {
6904 p = PREV_INSN (p);
6905 if (p == where)
6906 return value;
6907
6908 /* Don't trust the equivalence past a function call if either of
6909 the two values is in a call-clobbered register or in memory. */
6910 if (CALL_P (p))
6911 {
6912 int i;
6913
6914 if (goal_mem || need_stable_sp)
6915 return 0;
6916
6917 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6918 for (i = 0; i < nregs; ++i)
6919 if (call_used_regs[regno + i]
6920 || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode))
6921 return 0;
6922
6923 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6924 for (i = 0; i < valuenregs; ++i)
6925 if (call_used_regs[valueno + i]
6926 || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode))
6927 return 0;
6928 }
6929
6930 if (INSN_P (p))
6931 {
6932 pat = PATTERN (p);
6933
6934 /* Watch out for unspec_volatile, and volatile asms. */
6935 if (volatile_insn_p (pat))
6936 return 0;
6937
6938 /* If this insn P stores in either GOAL or VALUE, return 0.
6939 If GOAL is a memory ref and this insn writes memory, return 0.
6940 If GOAL is a memory ref and its address is not constant,
6941 and this insn P changes a register used in GOAL, return 0. */
6942
6943 if (GET_CODE (pat) == COND_EXEC)
6944 pat = COND_EXEC_CODE (pat);
6945 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6946 {
6947 rtx dest = SET_DEST (pat);
6948 while (GET_CODE (dest) == SUBREG
6949 || GET_CODE (dest) == ZERO_EXTRACT
6950 || GET_CODE (dest) == STRICT_LOW_PART)
6951 dest = XEXP (dest, 0);
6952 if (REG_P (dest))
6953 {
6954 int xregno = REGNO (dest);
6955 int xnregs;
6956 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
6957 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
6958 else
6959 xnregs = 1;
6960 if (xregno < regno + nregs && xregno + xnregs > regno)
6961 return 0;
6962 if (xregno < valueno + valuenregs
6963 && xregno + xnregs > valueno)
6964 return 0;
6965 if (goal_mem_addr_varies
6966 && reg_overlap_mentioned_for_reload_p (dest, goal))
6967 return 0;
6968 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6969 return 0;
6970 }
6971 else if (goal_mem && MEM_P (dest)
6972 && ! push_operand (dest, GET_MODE (dest)))
6973 return 0;
6974 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6975 && reg_equiv_memory_loc (regno) != 0)
6976 return 0;
6977 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6978 return 0;
6979 }
6980 else if (GET_CODE (pat) == PARALLEL)
6981 {
6982 int i;
6983 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6984 {
6985 rtx v1 = XVECEXP (pat, 0, i);
6986 if (GET_CODE (v1) == COND_EXEC)
6987 v1 = COND_EXEC_CODE (v1);
6988 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6989 {
6990 rtx dest = SET_DEST (v1);
6991 while (GET_CODE (dest) == SUBREG
6992 || GET_CODE (dest) == ZERO_EXTRACT
6993 || GET_CODE (dest) == STRICT_LOW_PART)
6994 dest = XEXP (dest, 0);
6995 if (REG_P (dest))
6996 {
6997 int xregno = REGNO (dest);
6998 int xnregs;
6999 if (REGNO (dest) < FIRST_PSEUDO_REGISTER)
7000 xnregs = hard_regno_nregs[xregno][GET_MODE (dest)];
7001 else
7002 xnregs = 1;
7003 if (xregno < regno + nregs
7004 && xregno + xnregs > regno)
7005 return 0;
7006 if (xregno < valueno + valuenregs
7007 && xregno + xnregs > valueno)
7008 return 0;
7009 if (goal_mem_addr_varies
7010 && reg_overlap_mentioned_for_reload_p (dest,
7011 goal))
7012 return 0;
7013 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7014 return 0;
7015 }
7016 else if (goal_mem && MEM_P (dest)
7017 && ! push_operand (dest, GET_MODE (dest)))
7018 return 0;
7019 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7020 && reg_equiv_memory_loc (regno) != 0)
7021 return 0;
7022 else if (need_stable_sp
7023 && push_operand (dest, GET_MODE (dest)))
7024 return 0;
7025 }
7026 }
7027 }
7028
7029 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7030 {
7031 rtx link;
7032
7033 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7034 link = XEXP (link, 1))
7035 {
7036 pat = XEXP (link, 0);
7037 if (GET_CODE (pat) == CLOBBER)
7038 {
7039 rtx dest = SET_DEST (pat);
7040
7041 if (REG_P (dest))
7042 {
7043 int xregno = REGNO (dest);
7044 int xnregs
7045 = hard_regno_nregs[xregno][GET_MODE (dest)];
7046
7047 if (xregno < regno + nregs
7048 && xregno + xnregs > regno)
7049 return 0;
7050 else if (xregno < valueno + valuenregs
7051 && xregno + xnregs > valueno)
7052 return 0;
7053 else if (goal_mem_addr_varies
7054 && reg_overlap_mentioned_for_reload_p (dest,
7055 goal))
7056 return 0;
7057 }
7058
7059 else if (goal_mem && MEM_P (dest)
7060 && ! push_operand (dest, GET_MODE (dest)))
7061 return 0;
7062 else if (need_stable_sp
7063 && push_operand (dest, GET_MODE (dest)))
7064 return 0;
7065 }
7066 }
7067 }
7068
7069 #if AUTO_INC_DEC
7070 /* If this insn auto-increments or auto-decrements
7071 either regno or valueno, return 0 now.
7072 If GOAL is a memory ref and its address is not constant,
7073 and this insn P increments a register used in GOAL, return 0. */
7074 {
7075 rtx link;
7076
7077 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7078 if (REG_NOTE_KIND (link) == REG_INC
7079 && REG_P (XEXP (link, 0)))
7080 {
7081 int incno = REGNO (XEXP (link, 0));
7082 if (incno < regno + nregs && incno >= regno)
7083 return 0;
7084 if (incno < valueno + valuenregs && incno >= valueno)
7085 return 0;
7086 if (goal_mem_addr_varies
7087 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7088 goal))
7089 return 0;
7090 }
7091 }
7092 #endif
7093 }
7094 }
7095 }
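/* A minimal sketch, using a hypothetical wrapper name: asking whether IN is
   already available in some GENERAL_REGS hard register just before INSN.
   The (short *) 1 argument rejects hard regs appearing in reload_reg_rtx,
   as described in the comment before find_equiv_reg.  */
#if 0
static rtx
general_reg_holding_value (rtx in, rtx_insn *insn)
{
  return find_equiv_reg (in, insn, GENERAL_REGS, -1, (short *) 1,
			 0, GET_MODE (in));
}
#endif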
7096 \f
7097 /* Find a place where INCED appears in an increment or decrement operator
7098 within X, and return the amount INCED is incremented or decremented by.
7099 The value is always positive; return 0 if no such place is found. */
7100
7101 static int
7102 find_inc_amount (rtx x, rtx inced)
7103 {
7104 enum rtx_code code = GET_CODE (x);
7105 const char *fmt;
7106 int i;
7107
7108 if (code == MEM)
7109 {
7110 rtx addr = XEXP (x, 0);
7111 if ((GET_CODE (addr) == PRE_DEC
7112 || GET_CODE (addr) == POST_DEC
7113 || GET_CODE (addr) == PRE_INC
7114 || GET_CODE (addr) == POST_INC)
7115 && XEXP (addr, 0) == inced)
7116 return GET_MODE_SIZE (GET_MODE (x));
7117 else if ((GET_CODE (addr) == PRE_MODIFY
7118 || GET_CODE (addr) == POST_MODIFY)
7119 && GET_CODE (XEXP (addr, 1)) == PLUS
7120 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7121 && XEXP (addr, 0) == inced
7122 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7123 {
7124 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7125 return i < 0 ? -i : i;
7126 }
7127 }
7128
7129 fmt = GET_RTX_FORMAT (code);
7130 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7131 {
7132 if (fmt[i] == 'e')
7133 {
7134 int tem = find_inc_amount (XEXP (x, i), inced);
7135 if (tem != 0)
7136 return tem;
7137 }
7138 if (fmt[i] == 'E')
7139 {
7140 int j;
7141 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7142 {
7143 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7144 if (tem != 0)
7145 return tem;
7146 }
7147 }
7148 }
7149
7150 return 0;
7151 }
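/* For example, given X = (mem:SI (post_inc:SI (reg:SI 1))) and
   INCED = (reg:SI 1), this returns GET_MODE_SIZE (SImode), i.e. 4 on a
   typical 32-bit target; for a (pre_modify (reg) (plus (reg) (const_int -8)))
   address it returns 8.  */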
7152 \f
7153 /* Return 1 if any register in the range [REGNO, ENDREGNO) is the subject
7154 of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7155
7156 static int
7157 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7158 rtx insn)
7159 {
7160 rtx link;
7161
7162 if (!AUTO_INC_DEC)
7163 return 0;
7164
7165 gcc_assert (insn);
7166
7167 if (! INSN_P (insn))
7168 return 0;
7169
7170 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7171 if (REG_NOTE_KIND (link) == REG_INC)
7172 {
7173 unsigned int test = (int) REGNO (XEXP (link, 0));
7174 if (test >= regno && test < endregno)
7175 return 1;
7176 }
7177 return 0;
7178 }
7179
7180 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7181 If SETS is 1, also consider SETs. If SETS is 2, also consider
7182 REG_INC notes. REGNO must refer to a hard register. */
7183
7184 int
7185 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7186 int sets)
7187 {
7188 unsigned int nregs, endregno;
7189
7190 /* regno must be a hard register. */
7191 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7192
7193 nregs = hard_regno_nregs[regno][mode];
7194 endregno = regno + nregs;
7195
7196 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7197 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7198 && REG_P (XEXP (PATTERN (insn), 0)))
7199 {
7200 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7201
7202 return test >= regno && test < endregno;
7203 }
7204
7205 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7206 return 1;
7207
7208 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7209 {
7210 int i = XVECLEN (PATTERN (insn), 0) - 1;
7211
7212 for (; i >= 0; i--)
7213 {
7214 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7215 if ((GET_CODE (elt) == CLOBBER
7216 || (sets == 1 && GET_CODE (elt) == SET))
7217 && REG_P (XEXP (elt, 0)))
7218 {
7219 unsigned int test = REGNO (XEXP (elt, 0));
7220
7221 if (test >= regno && test < endregno)
7222 return 1;
7223 }
7224 if (sets == 2
7225 && reg_inc_found_and_valid_p (regno, endregno, elt))
7226 return 1;
7227 }
7228 }
7229
7230 return 0;
7231 }
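/* A minimal sketch of the SETS argument, with hypothetical variable names.  */
#if 0
  int c0 = regno_clobbered_p (hregno, insn, mode, 0);	/* CLOBBERs only.  */
  int c1 = regno_clobbered_p (hregno, insn, mode, 1);	/* CLOBBERs and SETs.  */
  int c2 = regno_clobbered_p (hregno, insn, mode, 2);	/* CLOBBERs and
							   REG_INC notes.  */
#endif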
7232
7233 /* Find the low part, with mode MODE, of the hard register RELOADREG. */
7234 rtx
7235 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7236 {
7237 int regno;
7238
7239 if (GET_MODE (reloadreg) == mode)
7240 return reloadreg;
7241
7242 regno = REGNO (reloadreg);
7243
7244 if (REG_WORDS_BIG_ENDIAN)
7245 regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)]
7246 - (int) hard_regno_nregs[regno][mode];
7247
7248 return gen_rtx_REG (mode, regno);
7249 }
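/* For example, with 32-bit hard registers and REG_WORDS_BIG_ENDIAN, asking
   for the SImode low part of (reg:DI 2) yields (reg:SI 3), because the
   low-order word lives in the higher-numbered register; otherwise the
   result is simply (reg:SI 2).  */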
7250
7251 static const char *const reload_when_needed_name[] =
7252 {
7253 "RELOAD_FOR_INPUT",
7254 "RELOAD_FOR_OUTPUT",
7255 "RELOAD_FOR_INSN",
7256 "RELOAD_FOR_INPUT_ADDRESS",
7257 "RELOAD_FOR_INPADDR_ADDRESS",
7258 "RELOAD_FOR_OUTPUT_ADDRESS",
7259 "RELOAD_FOR_OUTADDR_ADDRESS",
7260 "RELOAD_FOR_OPERAND_ADDRESS",
7261 "RELOAD_FOR_OPADDR_ADDR",
7262 "RELOAD_OTHER",
7263 "RELOAD_FOR_OTHER_ADDRESS"
7264 };
7265
7266 /* These functions are used to print the variables set by `find_reloads'. */
7267
7268 DEBUG_FUNCTION void
7269 debug_reload_to_stream (FILE *f)
7270 {
7271 int r;
7272 const char *prefix;
7273
7274 if (! f)
7275 f = stderr;
7276 for (r = 0; r < n_reloads; r++)
7277 {
7278 fprintf (f, "Reload %d: ", r);
7279
7280 if (rld[r].in != 0)
7281 {
7282 fprintf (f, "reload_in (%s) = ",
7283 GET_MODE_NAME (rld[r].inmode));
7284 print_inline_rtx (f, rld[r].in, 24);
7285 fprintf (f, "\n\t");
7286 }
7287
7288 if (rld[r].out != 0)
7289 {
7290 fprintf (f, "reload_out (%s) = ",
7291 GET_MODE_NAME (rld[r].outmode));
7292 print_inline_rtx (f, rld[r].out, 24);
7293 fprintf (f, "\n\t");
7294 }
7295
7296 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7297
7298 fprintf (f, "%s (opnum = %d)",
7299 reload_when_needed_name[(int) rld[r].when_needed],
7300 rld[r].opnum);
7301
7302 if (rld[r].optional)
7303 fprintf (f, ", optional");
7304
7305 if (rld[r].nongroup)
7306 fprintf (f, ", nongroup");
7307
7308 if (rld[r].inc != 0)
7309 fprintf (f, ", inc by %d", rld[r].inc);
7310
7311 if (rld[r].nocombine)
7312 fprintf (f, ", can't combine");
7313
7314 if (rld[r].secondary_p)
7315 fprintf (f, ", secondary_reload_p");
7316
7317 if (rld[r].in_reg != 0)
7318 {
7319 fprintf (f, "\n\treload_in_reg: ");
7320 print_inline_rtx (f, rld[r].in_reg, 24);
7321 }
7322
7323 if (rld[r].out_reg != 0)
7324 {
7325 fprintf (f, "\n\treload_out_reg: ");
7326 print_inline_rtx (f, rld[r].out_reg, 24);
7327 }
7328
7329 if (rld[r].reg_rtx != 0)
7330 {
7331 fprintf (f, "\n\treload_reg_rtx: ");
7332 print_inline_rtx (f, rld[r].reg_rtx, 24);
7333 }
7334
7335 prefix = "\n\t";
7336 if (rld[r].secondary_in_reload != -1)
7337 {
7338 fprintf (f, "%ssecondary_in_reload = %d",
7339 prefix, rld[r].secondary_in_reload);
7340 prefix = ", ";
7341 }
7342
7343 if (rld[r].secondary_out_reload != -1)
7344 fprintf (f, "%ssecondary_out_reload = %d\n",
7345 prefix, rld[r].secondary_out_reload);
7346
7347 prefix = "\n\t";
7348 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7349 {
7350 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7351 insn_data[rld[r].secondary_in_icode].name);
7352 prefix = ", ";
7353 }
7354
7355 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7356 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7357 insn_data[rld[r].secondary_out_icode].name);
7358
7359 fprintf (f, "\n");
7360 }
7361 }
7362
7363 DEBUG_FUNCTION void
7364 debug_reload (void)
7365 {
7366 debug_reload_to_stream (stderr);
7367 }
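/* These are most conveniently invoked from the debugger; for example, while
   stopped inside find_reloads one can do

     (gdb) call debug_reload ()

   to dump every rld[] entry for the current insn to stderr.  */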