]>
Commit | Line | Data |
---|---|---|
eab89b90 | 1 | /* Search an insn for pseudo regs that must be in hard regs and are not. |
23a5b65a | 2 | Copyright (C) 1987-2014 Free Software Foundation, Inc. |
eab89b90 | 3 | |
1322177d | 4 | This file is part of GCC. |
eab89b90 | 5 | |
1322177d LB |
6 | GCC is free software; you can redistribute it and/or modify it under |
7 | the terms of the GNU General Public License as published by the Free | |
9dcd6f09 | 8 | Software Foundation; either version 3, or (at your option) any later |
1322177d | 9 | version. |
eab89b90 | 10 | |
1322177d LB |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
12 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
eab89b90 RK |
15 | |
16 | You should have received a copy of the GNU General Public License | |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
eab89b90 | 19 | |
eab89b90 RK |
20 | /* This file contains subroutines used only from the file reload1.c. |
21 | It knows how to scan one insn for operands and values | |
22 | that need to be copied into registers to make valid code. | |
23 | It also finds other operands and values which are valid | |
24 | but for which equivalent values in registers exist and | |
25 | ought to be used instead. | |
26 | ||
27 | Before processing the first insn of the function, call `init_reload'. | |
965ccc5a | 28 | init_reload actually has to be called earlier anyway. |
eab89b90 RK |
29 | |
30 | To scan an insn, call `find_reloads'. This does two things: | |
31 | 1. sets up tables describing which values must be reloaded | |
32 | for this insn, and what kind of hard regs they must be reloaded into; | |
33 | 2. optionally record the locations where those values appear in | |
34 | the data, so they can be replaced properly later. | |
35 | This is done only if the second arg to `find_reloads' is nonzero. | |
36 | ||
37 | The third arg to `find_reloads' specifies the number of levels | |
38 | of indirect addressing supported by the machine. If it is zero, | |
39 | indirect addressing is not valid. If it is one, (MEM (REG n)) | |
40 | is valid even if (REG n) did not get a hard register; if it is two, | |
41 | (MEM (MEM (REG n))) is also valid even if (REG n) did not get a | |
42 | hard register, and similarly for higher values. | |
43 | ||
44 | Then you must choose the hard regs to reload those pseudo regs into, | |
45 | and generate appropriate load insns before this insn and perhaps | |
46 | also store insns after this insn. Set up the array `reload_reg_rtx' | |
47 | to contain the REG rtx's for the registers you used. In some | |
48 | cases `find_reloads' will return a nonzero value in `reload_reg_rtx' | |
49 | for certain reloads. Then that tells you which register to use, | |
50 | so you do not need to allocate one. But you still do need to add extra | |
51 | instructions to copy the value into and out of that register. | |
52 | ||
53 | Finally you must call `subst_reloads' to substitute the reload reg rtx's | |
54 | into the locations already recorded. | |
55 | ||
56 | NOTE SIDE EFFECTS: | |
57 | ||
58 | find_reloads can alter the operands of the instruction it is called on. | |
59 | ||
60 | 1. Two operands of any sort may be interchanged, if they are in a | |
61 | commutative instruction. | |
62 | This happens only if find_reloads thinks the instruction will compile | |
63 | better that way. | |
64 | ||
65 | 2. Pseudo-registers that are equivalent to constants are replaced | |
66 | with those constants if they are not in hard registers. | |
67 | ||
68 | 1 happens every time find_reloads is called. | |
69 | 2 happens only when REPLACE is 1, which is only when | |
70 | actually doing the reloads, not when just counting them. | |
71 | ||
eab89b90 RK |
72 | Using a reload register for several reloads in one insn: |
73 | ||
74 | When an insn has reloads, it is considered as having three parts: | |
75 | the input reloads, the insn itself after reloading, and the output reloads. | |
76 | Reloads of values used in memory addresses are often needed for only one part. | |
77 | ||
78 | When this is so, reload_when_needed records which part needs the reload. | |
79 | Two reloads for different parts of the insn can share the same reload | |
80 | register. | |
81 | ||
82 | When a reload is used for addresses in multiple parts, or when it is | |
83 | an ordinary operand, it is classified as RELOAD_OTHER, and cannot share | |
84 | a register with any other reload. */ | |
85 | ||
86 | #define REG_OK_STRICT | |
87 | ||
0fa4cb7d SE |
88 | /* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */ |
89 | #undef DEBUG_RELOAD | |
90 | ||
eab89b90 | 91 | #include "config.h" |
670ee920 | 92 | #include "system.h" |
4977bab6 ZW |
93 | #include "coretypes.h" |
94 | #include "tm.h" | |
0cbd9993 | 95 | #include "rtl-error.h" |
6baf1cc8 | 96 | #include "tm_p.h" |
eab89b90 | 97 | #include "insn-config.h" |
e78d8e51 ZW |
98 | #include "expr.h" |
99 | #include "optabs.h" | |
eab89b90 | 100 | #include "recog.h" |
60393bbc AM |
101 | #include "dominance.h" |
102 | #include "cfg.h" | |
103 | #include "predict.h" | |
104 | #include "basic-block.h" | |
7a8cba34 | 105 | #include "df.h" |
eab89b90 RK |
106 | #include "reload.h" |
107 | #include "regs.h" | |
c4963a0a | 108 | #include "addresses.h" |
eab89b90 RK |
109 | #include "hard-reg-set.h" |
110 | #include "flags.h" | |
83685514 AM |
111 | #include "hashtab.h" |
112 | #include "hash-set.h" | |
113 | #include "vec.h" | |
114 | #include "machmode.h" | |
115 | #include "input.h" | |
49ad7cfa | 116 | #include "function.h" |
0bcf8261 | 117 | #include "params.h" |
34208acf | 118 | #include "target.h" |
ce18efcb | 119 | #include "ira.h" |
eab89b90 | 120 | |
fbbf66e7 RS |
121 | /* True if X is a constant that can be forced into the constant pool. |
122 | MODE is the mode of the operand, or VOIDmode if not known. */ | |
123 | #define CONST_POOL_OK_P(MODE, X) \ | |
124 | ((MODE) != VOIDmode \ | |
125 | && CONSTANT_P (X) \ | |
34208acf | 126 | && GET_CODE (X) != HIGH \ |
fbbf66e7 | 127 | && !targetm.cannot_force_const_mem (MODE, X)) |
e9840398 AO |
128 | |
129 | /* True if C is a non-empty register class that has too few registers | |
130 | to be safely used as a reload target class. */ | |
07b8f0a8 AS |
131 | |
132 | static inline bool | |
133 | small_register_class_p (reg_class_t rclass) | |
134 | { | |
135 | return (reg_class_size [(int) rclass] == 1 | |
136 | || (reg_class_size [(int) rclass] >= 1 | |
137 | && targetm.class_likely_spilled_p (rclass))); | |
138 | } | |
e9840398 | 139 | |
eab89b90 | 140 | \f |
eceef4c9 BS |
141 | /* All reloads of the current insn are recorded here. See reload.h for |
142 | comments. */ | |
eab89b90 | 143 | int n_reloads; |
eceef4c9 | 144 | struct reload rld[MAX_RELOADS]; |
eab89b90 RK |
145 | |
146 | /* All the "earlyclobber" operands of the current insn | |
147 | are recorded here. */ | |
148 | int n_earlyclobbers; | |
149 | rtx reload_earlyclobbers[MAX_RECOG_OPERANDS]; | |
150 | ||
a8c9daeb RK |
151 | int reload_n_operands; |
152 | ||
eab89b90 RK |
153 | /* Replacing reloads. |
154 | ||
155 | If `replace_reloads' is nonzero, then as each reload is recorded | |
156 | an entry is made for it in the table `replacements'. | |
157 | Then later `subst_reloads' can look through that table and | |
158 | perform all the replacements needed. */ | |
159 | ||
160 | /* Nonzero means record the places to replace. */ | |
161 | static int replace_reloads; | |
162 | ||
163 | /* Each replacement is recorded with a structure like this. */ | |
164 | struct replacement | |
165 | { | |
166 | rtx *where; /* Location to store in */ | |
eab89b90 | 167 | int what; /* which reload this is for */ |
ef4bddc2 | 168 | machine_mode mode; /* mode it must have */ |
eab89b90 RK |
169 | }; |
170 | ||
171 | static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)]; | |
172 | ||
173 | /* Number of replacements currently recorded. */ | |
174 | static int n_replacements; | |
175 | ||
a8c9daeb RK |
176 | /* Used to track what is modified by an operand. */ |
177 | struct decomposition | |
178 | { | |
0f41302f MS |
179 | int reg_flag; /* Nonzero if referencing a register. */ |
180 | int safe; /* Nonzero if this can't conflict with anything. */ | |
181 | rtx base; /* Base address for MEM. */ | |
182 | HOST_WIDE_INT start; /* Starting offset or register number. */ | |
2a6d5ce0 | 183 | HOST_WIDE_INT end; /* Ending offset or register number. */ |
a8c9daeb RK |
184 | }; |
185 | ||
0dadecf6 RK |
186 | #ifdef SECONDARY_MEMORY_NEEDED |
187 | ||
188 | /* Save MEMs needed to copy from one class of registers to another. One MEM | |
05d10675 | 189 | is used per mode, but normally only one or two modes are ever used. |
0dadecf6 | 190 | |
05d10675 | 191 | We keep two versions, before and after register elimination. The one |
a8c9daeb RK |
192 | after register elimination is record separately for each operand. This |
193 | is done in case the address is not valid to be sure that we separately | |
194 | reload each. */ | |
0dadecf6 RK |
195 | |
196 | static rtx secondary_memlocs[NUM_MACHINE_MODES]; | |
77545d45 | 197 | static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS]; |
048b0d2e | 198 | static int secondary_memlocs_elim_used = 0; |
0dadecf6 RK |
199 | #endif |
200 | ||
eab89b90 RK |
201 | /* The instruction we are doing reloads for; |
202 | so we can test whether a register dies in it. */ | |
5d86f5f9 | 203 | static rtx_insn *this_insn; |
eab89b90 RK |
204 | |
205 | /* Nonzero if this instruction is a user-specified asm with operands. */ | |
206 | static int this_insn_is_asm; | |
207 | ||
208 | /* If hard_regs_live_known is nonzero, | |
209 | we can tell which hard regs are currently live, | |
210 | at least enough to succeed in choosing dummy reloads. */ | |
211 | static int hard_regs_live_known; | |
212 | ||
213 | /* Indexed by hard reg number, | |
956d6950 | 214 | element is nonnegative if hard reg has been spilled. |
eab89b90 RK |
215 | This vector is passed to `find_reloads' as an argument |
216 | and is not changed here. */ | |
217 | static short *static_reload_reg_p; | |
218 | ||
219 | /* Set to 1 in subst_reg_equivs if it changes anything. */ | |
220 | static int subst_reg_equivs_changed; | |
221 | ||
222 | /* On return from push_reload, holds the reload-number for the OUT | |
223 | operand, which can be different for that from the input operand. */ | |
224 | static int output_reloadnum; | |
225 | ||
9ec7078b RK |
226 | /* Compare two RTX's. */ |
227 | #define MATCHES(x, y) \ | |
f8cfc6aa JQ |
228 | (x == y || (x != 0 && (REG_P (x) \ |
229 | ? REG_P (y) && REGNO (x) == REGNO (y) \ | |
9ec7078b RK |
230 | : rtx_equal_p (x, y) && ! side_effects_p (x)))) |
231 | ||
232 | /* Indicates if two reloads purposes are for similar enough things that we | |
233 | can merge their reloads. */ | |
234 | #define MERGABLE_RELOADS(when1, when2, op1, op2) \ | |
235 | ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \ | |
236 | || ((when1) == (when2) && (op1) == (op2)) \ | |
237 | || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \ | |
238 | || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \ | |
239 | && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \ | |
240 | || ((when1) == RELOAD_FOR_OTHER_ADDRESS \ | |
241 | && (when2) == RELOAD_FOR_OTHER_ADDRESS)) | |
242 | ||
243 | /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */ | |
244 | #define MERGE_TO_OTHER(when1, when2, op1, op2) \ | |
245 | ((when1) != (when2) \ | |
246 | || ! ((op1) == (op2) \ | |
247 | || (when1) == RELOAD_FOR_INPUT \ | |
248 | || (when1) == RELOAD_FOR_OPERAND_ADDRESS \ | |
249 | || (when1) == RELOAD_FOR_OTHER_ADDRESS)) | |
250 | ||
47c8cf91 ILT |
251 | /* If we are going to reload an address, compute the reload type to |
252 | use. */ | |
253 | #define ADDR_TYPE(type) \ | |
254 | ((type) == RELOAD_FOR_INPUT_ADDRESS \ | |
255 | ? RELOAD_FOR_INPADDR_ADDRESS \ | |
256 | : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \ | |
257 | ? RELOAD_FOR_OUTADDR_ADDRESS \ | |
258 | : (type))) | |
259 | ||
0c20a65f | 260 | static int push_secondary_reload (int, rtx, int, int, enum reg_class, |
ef4bddc2 | 261 | machine_mode, enum reload_type, |
8a99f6f9 | 262 | enum insn_code *, secondary_reload_info *); |
ef4bddc2 | 263 | static enum reg_class find_valid_class (machine_mode, machine_mode, |
e11ab33b | 264 | int, unsigned int); |
ef4bddc2 | 265 | static void push_replacement (rtx *, int, machine_mode); |
0c20a65f AJ |
266 | static void dup_replacements (rtx *, rtx *); |
267 | static void combine_reloads (void); | |
268 | static int find_reusable_reload (rtx *, rtx, enum reg_class, | |
269 | enum reload_type, int, int); | |
ef4bddc2 RS |
270 | static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode, |
271 | machine_mode, reg_class_t, int, int); | |
0c20a65f AJ |
272 | static int hard_reg_set_here_p (unsigned int, unsigned int, rtx); |
273 | static struct decomposition decompose (rtx); | |
274 | static int immune_p (rtx, rtx, struct decomposition); | |
1f7f6676 | 275 | static bool alternative_allows_const_pool_ref (rtx, const char *, int); |
5d86f5f9 DM |
276 | static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int, |
277 | rtx_insn *, int *); | |
0c20a65f | 278 | static rtx make_memloc (rtx, int); |
ef4bddc2 | 279 | static int maybe_memory_address_addr_space_p (machine_mode, rtx, |
09e881c9 | 280 | addr_space_t, rtx *); |
ef4bddc2 | 281 | static int find_reloads_address (machine_mode, rtx *, rtx, rtx *, |
5d86f5f9 DM |
282 | int, enum reload_type, int, rtx_insn *); |
283 | static rtx subst_reg_equivs (rtx, rtx_insn *); | |
0c20a65f | 284 | static rtx subst_indexed_address (rtx); |
5d86f5f9 | 285 | static void update_auto_inc_notes (rtx_insn *, int, int); |
ef4bddc2 | 286 | static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int, |
c4963a0a | 287 | enum rtx_code, enum rtx_code, rtx *, |
5d86f5f9 | 288 | int, enum reload_type,int, rtx_insn *); |
0c20a65f | 289 | static void find_reloads_address_part (rtx, rtx *, enum reg_class, |
ef4bddc2 | 290 | machine_mode, int, |
0c20a65f | 291 | enum reload_type, int); |
80de67b8 | 292 | static rtx find_reloads_subreg_address (rtx, int, enum reload_type, |
5d86f5f9 | 293 | int, rtx_insn *, int *); |
0c20a65f AJ |
294 | static void copy_replacements_1 (rtx *, rtx *, int); |
295 | static int find_inc_amount (rtx, rtx); | |
10015a27 KH |
296 | static int refers_to_mem_for_reload_p (rtx); |
297 | static int refers_to_regno_for_reload_p (unsigned int, unsigned int, | |
298 | rtx, rtx *); | |
3f1e3e70 AO |
299 | |
300 | /* Add NEW to reg_equiv_alt_mem_list[REGNO] if it's not present in the | |
301 | list yet. */ | |
302 | ||
303 | static void | |
304 | push_reg_equiv_alt_mem (int regno, rtx mem) | |
305 | { | |
306 | rtx it; | |
307 | ||
f2034d06 | 308 | for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1)) |
3f1e3e70 AO |
309 | if (rtx_equal_p (XEXP (it, 0), mem)) |
310 | return; | |
311 | ||
f2034d06 | 312 | reg_equiv_alt_mem_list (regno) |
3f1e3e70 | 313 | = alloc_EXPR_LIST (REG_EQUIV, mem, |
f2034d06 | 314 | reg_equiv_alt_mem_list (regno)); |
3f1e3e70 | 315 | } |
eab89b90 | 316 | \f |
eab89b90 | 317 | /* Determine if any secondary reloads are needed for loading (if IN_P is |
40f03658 | 318 | nonzero) or storing (if IN_P is zero) X to or from a reload register of |
9ec7078b RK |
319 | register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads |
320 | are needed, push them. | |
321 | ||
322 | Return the reload number of the secondary reload we made, or -1 if | |
323 | we didn't need one. *PICODE is set to the insn_code to use if we do | |
324 | need a secondary reload. */ | |
325 | ||
326 | static int | |
0c20a65f AJ |
327 | push_secondary_reload (int in_p, rtx x, int opnum, int optional, |
328 | enum reg_class reload_class, | |
ef4bddc2 | 329 | machine_mode reload_mode, enum reload_type type, |
8a99f6f9 | 330 | enum insn_code *picode, secondary_reload_info *prev_sri) |
eab89b90 | 331 | { |
55d796da | 332 | enum reg_class rclass = NO_REGS; |
8a99f6f9 | 333 | enum reg_class scratch_class; |
ef4bddc2 | 334 | machine_mode mode = reload_mode; |
eab89b90 | 335 | enum insn_code icode = CODE_FOR_nothing; |
65b4c337 | 336 | enum insn_code t_icode = CODE_FOR_nothing; |
d94d2abc | 337 | enum reload_type secondary_type; |
9ec7078b | 338 | int s_reload, t_reload = -1; |
8a99f6f9 | 339 | const char *scratch_constraint; |
8a99f6f9 | 340 | secondary_reload_info sri; |
9ec7078b | 341 | |
47c8cf91 ILT |
342 | if (type == RELOAD_FOR_INPUT_ADDRESS |
343 | || type == RELOAD_FOR_OUTPUT_ADDRESS | |
344 | || type == RELOAD_FOR_INPADDR_ADDRESS | |
345 | || type == RELOAD_FOR_OUTADDR_ADDRESS) | |
d94d2abc RK |
346 | secondary_type = type; |
347 | else | |
348 | secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS; | |
349 | ||
9ec7078b | 350 | *picode = CODE_FOR_nothing; |
eab89b90 | 351 | |
67340b03 RK |
352 | /* If X is a paradoxical SUBREG, use the inner value to determine both the |
353 | mode and object being reloaded. */ | |
6a4bdc79 | 354 | if (paradoxical_subreg_p (x)) |
67340b03 RK |
355 | { |
356 | x = SUBREG_REG (x); | |
357 | reload_mode = GET_MODE (x); | |
358 | } | |
359 | ||
d45cf215 RS |
360 | /* If X is a pseudo-register that has an equivalent MEM (actually, if it |
361 | is still a pseudo-register by now, it *must* have an equivalent MEM | |
362 | but we don't want to assume that), use that equivalent when seeing if | |
363 | a secondary reload is needed since whether or not a reload is needed | |
364 | might be sensitive to the form of the MEM. */ | |
365 | ||
f8cfc6aa | 366 | if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER |
f2034d06 JL |
367 | && reg_equiv_mem (REGNO (x))) |
368 | x = reg_equiv_mem (REGNO (x)); | |
d45cf215 | 369 | |
8a99f6f9 R |
370 | sri.icode = CODE_FOR_nothing; |
371 | sri.prev_sri = prev_sri; | |
a87cf97e JR |
372 | rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class, |
373 | reload_mode, &sri); | |
32e8bb8e | 374 | icode = (enum insn_code) sri.icode; |
eab89b90 | 375 | |
9ec7078b | 376 | /* If we don't need any secondary registers, done. */ |
55d796da | 377 | if (rclass == NO_REGS && icode == CODE_FOR_nothing) |
9ec7078b | 378 | return -1; |
eab89b90 | 379 | |
55d796da KG |
380 | if (rclass != NO_REGS) |
381 | t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass, | |
8a99f6f9 | 382 | reload_mode, type, &t_icode, &sri); |
eab89b90 | 383 | |
8a99f6f9 R |
384 | /* If we will be using an insn, the secondary reload is for a |
385 | scratch register. */ | |
eab89b90 RK |
386 | |
387 | if (icode != CODE_FOR_nothing) | |
388 | { | |
40f03658 | 389 | /* If IN_P is nonzero, the reload register will be the output in |
eab89b90 RK |
390 | operand 0. If IN_P is zero, the reload register will be the input |
391 | in operand 1. Outputs should have an initial "=", which we must | |
392 | skip. */ | |
393 | ||
8a99f6f9 R |
394 | /* ??? It would be useful to be able to handle only two, or more than |
395 | three, operands, but for now we can only handle the case of having | |
396 | exactly three: output, input and one temp/scratch. */ | |
397 | gcc_assert (insn_data[(int) icode].n_operands == 3); | |
398 | ||
399 | /* ??? We currently have no way to represent a reload that needs | |
6416ae7f | 400 | an icode to reload from an intermediate tertiary reload register. |
8a99f6f9 R |
401 | We should probably have a new field in struct reload to tag a |
402 | chain of scratch operand reloads onto. */ | |
55d796da | 403 | gcc_assert (rclass == NO_REGS); |
8a99f6f9 R |
404 | |
405 | scratch_constraint = insn_data[(int) icode].operand[2].constraint; | |
406 | gcc_assert (*scratch_constraint == '='); | |
407 | scratch_constraint++; | |
408 | if (*scratch_constraint == '&') | |
409 | scratch_constraint++; | |
8677664e RS |
410 | scratch_class = (reg_class_for_constraint |
411 | (lookup_constraint (scratch_constraint))); | |
8a99f6f9 | 412 | |
55d796da | 413 | rclass = scratch_class; |
8a99f6f9 | 414 | mode = insn_data[(int) icode].operand[2].mode; |
eab89b90 RK |
415 | } |
416 | ||
9ec7078b RK |
417 | /* This case isn't valid, so fail. Reload is allowed to use the same |
418 | register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but | |
419 | in the case of a secondary register, we actually need two different | |
420 | registers for correct code. We fail here to prevent the possibility of | |
421 | silently generating incorrect code later. | |
422 | ||
423 | The convention is that secondary input reloads are valid only if the | |
31989264 RH |
424 | secondary_class is different from class. If you have such a case, you |
425 | can not use secondary reloads, you must work around the problem some | |
426 | other way. | |
9ec7078b | 427 | |
94aaab7a RH |
428 | Allow this when a reload_in/out pattern is being used. I.e. assume |
429 | that the generated code handles this case. */ | |
9ec7078b | 430 | |
55d796da | 431 | gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing |
41374e13 | 432 | || t_icode != CODE_FOR_nothing); |
9ec7078b | 433 | |
9ec7078b RK |
434 | /* See if we can reuse an existing secondary reload. */ |
435 | for (s_reload = 0; s_reload < n_reloads; s_reload++) | |
eceef4c9 | 436 | if (rld[s_reload].secondary_p |
48c54229 KG |
437 | && (reg_class_subset_p (rclass, rld[s_reload].rclass) |
438 | || reg_class_subset_p (rld[s_reload].rclass, rclass)) | |
eceef4c9 BS |
439 | && ((in_p && rld[s_reload].inmode == mode) |
440 | || (! in_p && rld[s_reload].outmode == mode)) | |
441 | && ((in_p && rld[s_reload].secondary_in_reload == t_reload) | |
442 | || (! in_p && rld[s_reload].secondary_out_reload == t_reload)) | |
443 | && ((in_p && rld[s_reload].secondary_in_icode == t_icode) | |
444 | || (! in_p && rld[s_reload].secondary_out_icode == t_icode)) | |
07b8f0a8 | 445 | && (small_register_class_p (rclass) |
42db504c | 446 | || targetm.small_register_classes_for_mode_p (VOIDmode)) |
eceef4c9 BS |
447 | && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed, |
448 | opnum, rld[s_reload].opnum)) | |
9ec7078b RK |
449 | { |
450 | if (in_p) | |
eceef4c9 | 451 | rld[s_reload].inmode = mode; |
9ec7078b | 452 | if (! in_p) |
eceef4c9 | 453 | rld[s_reload].outmode = mode; |
9ec7078b | 454 | |
48c54229 KG |
455 | if (reg_class_subset_p (rclass, rld[s_reload].rclass)) |
456 | rld[s_reload].rclass = rclass; | |
9ec7078b | 457 | |
eceef4c9 BS |
458 | rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum); |
459 | rld[s_reload].optional &= optional; | |
460 | rld[s_reload].secondary_p = 1; | |
461 | if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed, | |
462 | opnum, rld[s_reload].opnum)) | |
463 | rld[s_reload].when_needed = RELOAD_OTHER; | |
c3be2598 JW |
464 | |
465 | break; | |
9ec7078b | 466 | } |
eab89b90 | 467 | |
9ec7078b RK |
468 | if (s_reload == n_reloads) |
469 | { | |
e9a25f70 JL |
470 | #ifdef SECONDARY_MEMORY_NEEDED |
471 | /* If we need a memory location to copy between the two reload regs, | |
472 | set it up now. Note that we do the input case before making | |
05d10675 | 473 | the reload and the output case after. This is due to the |
e9a25f70 JL |
474 | way reloads are output. */ |
475 | ||
476 | if (in_p && icode == CODE_FOR_nothing | |
55d796da | 477 | && SECONDARY_MEMORY_NEEDED (rclass, reload_class, mode)) |
6fe8aebc RH |
478 | { |
479 | get_secondary_mem (x, reload_mode, opnum, type); | |
480 | ||
481 | /* We may have just added new reloads. Make sure we add | |
482 | the new reload at the end. */ | |
483 | s_reload = n_reloads; | |
484 | } | |
e9a25f70 JL |
485 | #endif |
486 | ||
9ec7078b | 487 | /* We need to make a new secondary reload for this register class. */ |
eceef4c9 | 488 | rld[s_reload].in = rld[s_reload].out = 0; |
48c54229 | 489 | rld[s_reload].rclass = rclass; |
eceef4c9 BS |
490 | |
491 | rld[s_reload].inmode = in_p ? mode : VOIDmode; | |
492 | rld[s_reload].outmode = ! in_p ? mode : VOIDmode; | |
493 | rld[s_reload].reg_rtx = 0; | |
494 | rld[s_reload].optional = optional; | |
eceef4c9 | 495 | rld[s_reload].inc = 0; |
9ec7078b | 496 | /* Maybe we could combine these, but it seems too tricky. */ |
eceef4c9 BS |
497 | rld[s_reload].nocombine = 1; |
498 | rld[s_reload].in_reg = 0; | |
499 | rld[s_reload].out_reg = 0; | |
500 | rld[s_reload].opnum = opnum; | |
501 | rld[s_reload].when_needed = secondary_type; | |
502 | rld[s_reload].secondary_in_reload = in_p ? t_reload : -1; | |
503 | rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1; | |
504 | rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing; | |
505 | rld[s_reload].secondary_out_icode | |
9ec7078b | 506 | = ! in_p ? t_icode : CODE_FOR_nothing; |
eceef4c9 | 507 | rld[s_reload].secondary_p = 1; |
9ec7078b RK |
508 | |
509 | n_reloads++; | |
510 | ||
511 | #ifdef SECONDARY_MEMORY_NEEDED | |
9ec7078b | 512 | if (! in_p && icode == CODE_FOR_nothing |
55d796da | 513 | && SECONDARY_MEMORY_NEEDED (reload_class, rclass, mode)) |
f49e4127 | 514 | get_secondary_mem (x, mode, opnum, type); |
9ec7078b RK |
515 | #endif |
516 | } | |
517 | ||
518 | *picode = icode; | |
519 | return s_reload; | |
eab89b90 | 520 | } |
8a99f6f9 R |
521 | |
522 | /* If a secondary reload is needed, return its class. If both an intermediate | |
523 | register and a scratch register is needed, we return the class of the | |
524 | intermediate register. */ | |
faa832a7 | 525 | reg_class_t |
ef4bddc2 | 526 | secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode, |
faa832a7 | 527 | rtx x) |
8a99f6f9 R |
528 | { |
529 | enum insn_code icode; | |
530 | secondary_reload_info sri; | |
531 | ||
532 | sri.icode = CODE_FOR_nothing; | |
533 | sri.prev_sri = NULL; | |
a87cf97e JR |
534 | rclass |
535 | = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri); | |
32e8bb8e | 536 | icode = (enum insn_code) sri.icode; |
8a99f6f9 R |
537 | |
538 | /* If there are no secondary reloads at all, we return NO_REGS. | |
539 | If an intermediate register is needed, we return its class. */ | |
55d796da KG |
540 | if (icode == CODE_FOR_nothing || rclass != NO_REGS) |
541 | return rclass; | |
8a99f6f9 R |
542 | |
543 | /* No intermediate register is needed, but we have a special reload | |
544 | pattern, which we assume for now needs a scratch register. */ | |
545 | return scratch_reload_class (icode); | |
546 | } | |
547 | ||
548 | /* ICODE is the insn_code of a reload pattern. Check that it has exactly | |
549 | three operands, verify that operand 2 is an output operand, and return | |
550 | its register class. | |
551 | ??? We'd like to be able to handle any pattern with at least 2 operands, | |
552 | for zero or more scratch registers, but that needs more infrastructure. */ | |
553 | enum reg_class | |
554 | scratch_reload_class (enum insn_code icode) | |
555 | { | |
556 | const char *scratch_constraint; | |
55d796da | 557 | enum reg_class rclass; |
8a99f6f9 R |
558 | |
559 | gcc_assert (insn_data[(int) icode].n_operands == 3); | |
560 | scratch_constraint = insn_data[(int) icode].operand[2].constraint; | |
561 | gcc_assert (*scratch_constraint == '='); | |
562 | scratch_constraint++; | |
563 | if (*scratch_constraint == '&') | |
564 | scratch_constraint++; | |
777e635f | 565 | rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint)); |
55d796da KG |
566 | gcc_assert (rclass != NO_REGS); |
567 | return rclass; | |
8a99f6f9 | 568 | } |
eab89b90 | 569 | \f |
0dadecf6 RK |
570 | #ifdef SECONDARY_MEMORY_NEEDED |
571 | ||
05d10675 | 572 | /* Return a memory location that will be used to copy X in mode MODE. |
0dadecf6 RK |
573 | If we haven't already made a location for this mode in this insn, |
574 | call find_reloads_address on the location being returned. */ | |
575 | ||
576 | rtx | |
ef4bddc2 | 577 | get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode, |
0c20a65f | 578 | int opnum, enum reload_type type) |
0dadecf6 RK |
579 | { |
580 | rtx loc; | |
581 | int mem_valid; | |
582 | ||
64609742 RK |
583 | /* By default, if MODE is narrower than a word, widen it to a word. |
584 | This is required because most machines that require these memory | |
585 | locations do not support short load and stores from all registers | |
586 | (e.g., FP registers). */ | |
587 | ||
588 | #ifdef SECONDARY_MEMORY_NEEDED_MODE | |
589 | mode = SECONDARY_MEMORY_NEEDED_MODE (mode); | |
590 | #else | |
4f9e0766 | 591 | if (GET_MODE_BITSIZE (mode) < BITS_PER_WORD && INTEGRAL_MODE_P (mode)) |
0dadecf6 | 592 | mode = mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0); |
64609742 | 593 | #endif |
0dadecf6 | 594 | |
77545d45 RK |
595 | /* If we already have made a MEM for this operand in MODE, return it. */ |
596 | if (secondary_memlocs_elim[(int) mode][opnum] != 0) | |
597 | return secondary_memlocs_elim[(int) mode][opnum]; | |
0dadecf6 | 598 | |
05d10675 | 599 | /* If this is the first time we've tried to get a MEM for this mode, |
0dadecf6 RK |
600 | allocate a new one. `something_changed' in reload will get set |
601 | by noticing that the frame size has changed. */ | |
602 | ||
603 | if (secondary_memlocs[(int) mode] == 0) | |
b24a53d5 JW |
604 | { |
605 | #ifdef SECONDARY_MEMORY_NEEDED_RTX | |
606 | secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode); | |
607 | #else | |
608 | secondary_memlocs[(int) mode] | |
609 | = assign_stack_local (mode, GET_MODE_SIZE (mode), 0); | |
610 | #endif | |
611 | } | |
0dadecf6 RK |
612 | |
613 | /* Get a version of the address doing any eliminations needed. If that | |
614 | didn't give us a new MEM, make a new one if it isn't valid. */ | |
615 | ||
1914f5da | 616 | loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX); |
09e881c9 BE |
617 | mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0), |
618 | MEM_ADDR_SPACE (loc)); | |
0dadecf6 RK |
619 | |
620 | if (! mem_valid && loc == secondary_memlocs[(int) mode]) | |
621 | loc = copy_rtx (loc); | |
622 | ||
623 | /* The only time the call below will do anything is if the stack | |
624 | offset is too large. In that case IND_LEVELS doesn't matter, so we | |
a8c9daeb RK |
625 | can just pass a zero. Adjust the type to be the address of the |
626 | corresponding object. If the address was valid, save the eliminated | |
627 | address. If it wasn't valid, we need to make a reload each time, so | |
628 | don't save it. */ | |
0dadecf6 | 629 | |
a8c9daeb RK |
630 | if (! mem_valid) |
631 | { | |
632 | type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS | |
633 | : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS | |
634 | : RELOAD_OTHER); | |
8d618585 | 635 | |
57292ec3 | 636 | find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0), |
55c22565 | 637 | opnum, type, 0, 0); |
a8c9daeb | 638 | } |
0dadecf6 | 639 | |
77545d45 | 640 | secondary_memlocs_elim[(int) mode][opnum] = loc; |
98e5e087 JH |
641 | if (secondary_memlocs_elim_used <= (int)mode) |
642 | secondary_memlocs_elim_used = (int)mode + 1; | |
0dadecf6 RK |
643 | return loc; |
644 | } | |
645 | ||
646 | /* Clear any secondary memory locations we've made. */ | |
647 | ||
648 | void | |
0c20a65f | 649 | clear_secondary_mem (void) |
0dadecf6 | 650 | { |
703ad42b | 651 | memset (secondary_memlocs, 0, sizeof secondary_memlocs); |
0dadecf6 RK |
652 | } |
653 | #endif /* SECONDARY_MEMORY_NEEDED */ | |
654 | \f | |
e11ab33b DD |
655 | |
656 | /* Find the largest class which has at least one register valid in | |
657 | mode INNER, and which for every such register, that register number | |
658 | plus N is also valid in OUTER (if in range) and is cheap to move | |
0e61db61 | 659 | into REGNO. Such a class must exist. */ |
c6716840 RK |
660 | |
661 | static enum reg_class | |
ef4bddc2 RS |
662 | find_valid_class (machine_mode outer ATTRIBUTE_UNUSED, |
663 | machine_mode inner ATTRIBUTE_UNUSED, int n, | |
0c20a65f | 664 | unsigned int dest_regno ATTRIBUTE_UNUSED) |
c6716840 | 665 | { |
a65dc37d | 666 | int best_cost = -1; |
55d796da | 667 | int rclass; |
c6716840 | 668 | int regno; |
f428f252 | 669 | enum reg_class best_class = NO_REGS; |
4977bab6 | 670 | enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno); |
770ae6cc | 671 | unsigned int best_size = 0; |
b80cb6e2 | 672 | int cost; |
c6716840 | 673 | |
55d796da | 674 | for (rclass = 1; rclass < N_REG_CLASSES; rclass++) |
c6716840 RK |
675 | { |
676 | int bad = 0; | |
e11ab33b DD |
677 | int good = 0; |
678 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++) | |
55d796da | 679 | if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno)) |
e11ab33b DD |
680 | { |
681 | if (HARD_REGNO_MODE_OK (regno, inner)) | |
682 | { | |
683 | good = 1; | |
befe8647 JR |
684 | if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n) |
685 | && ! HARD_REGNO_MODE_OK (regno + n, outer)) | |
e11ab33b DD |
686 | bad = 1; |
687 | } | |
688 | } | |
c6716840 | 689 | |
e11ab33b | 690 | if (bad || !good) |
b80cb6e2 | 691 | continue; |
de8f4b07 | 692 | cost = register_move_cost (outer, (enum reg_class) rclass, dest_class); |
b80cb6e2 | 693 | |
55d796da | 694 | if ((reg_class_size[rclass] > best_size |
b80cb6e2 JH |
695 | && (best_cost < 0 || best_cost >= cost)) |
696 | || best_cost > cost) | |
a65dc37d | 697 | { |
32e8bb8e | 698 | best_class = (enum reg_class) rclass; |
55d796da | 699 | best_size = reg_class_size[rclass]; |
de8f4b07 | 700 | best_cost = register_move_cost (outer, (enum reg_class) rclass, |
bbbbb16a | 701 | dest_class); |
a65dc37d | 702 | } |
c6716840 RK |
703 | } |
704 | ||
41374e13 | 705 | gcc_assert (best_size != 0); |
c6716840 RK |
706 | |
707 | return best_class; | |
708 | } | |
82ad0aaf BS |
709 | |
710 | /* We are trying to reload a subreg of something that is not a register. | |
51e44392 | 711 | Find the largest class which contains only registers valid in |
82ad0aaf BS |
712 | mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in |
713 | which we would eventually like to obtain the object. */ | |
714 | ||
715 | static enum reg_class | |
ef4bddc2 RS |
716 | find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED, |
717 | machine_mode mode ATTRIBUTE_UNUSED, | |
82ad0aaf BS |
718 | enum reg_class dest_class ATTRIBUTE_UNUSED) |
719 | { | |
720 | int best_cost = -1; | |
721 | int rclass; | |
722 | int regno; | |
723 | enum reg_class best_class = NO_REGS; | |
724 | unsigned int best_size = 0; | |
725 | int cost; | |
726 | ||
727 | for (rclass = 1; rclass < N_REG_CLASSES; rclass++) | |
728 | { | |
729 | int bad = 0; | |
730 | for (regno = 0; regno < FIRST_PSEUDO_REGISTER && !bad; regno++) | |
51e44392 BS |
731 | { |
732 | if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno) | |
733 | && !HARD_REGNO_MODE_OK (regno, mode)) | |
734 | bad = 1; | |
735 | } | |
736 | ||
82ad0aaf BS |
737 | if (bad) |
738 | continue; | |
739 | ||
740 | cost = register_move_cost (outer, (enum reg_class) rclass, dest_class); | |
741 | ||
742 | if ((reg_class_size[rclass] > best_size | |
743 | && (best_cost < 0 || best_cost >= cost)) | |
744 | || best_cost > cost) | |
745 | { | |
746 | best_class = (enum reg_class) rclass; | |
747 | best_size = reg_class_size[rclass]; | |
748 | best_cost = register_move_cost (outer, (enum reg_class) rclass, | |
749 | dest_class); | |
750 | } | |
751 | } | |
752 | ||
753 | gcc_assert (best_size != 0); | |
754 | ||
755 | #ifdef LIMIT_RELOAD_CLASS | |
756 | best_class = LIMIT_RELOAD_CLASS (mode, best_class); | |
757 | #endif | |
758 | return best_class; | |
759 | } | |
c6716840 | 760 | \f |
121315ea BS |
761 | /* Return the number of a previously made reload that can be combined with |
762 | a new one, or n_reloads if none of the existing reloads can be used. | |
55d796da | 763 | OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to |
121315ea BS |
764 | push_reload, they determine the kind of the new reload that we try to |
765 | combine. P_IN points to the corresponding value of IN, which can be | |
766 | modified by this function. | |
767 | DONT_SHARE is nonzero if we can't share any input-only reload for IN. */ | |
35fb60c4 | 768 | |
static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  /* First pass: look for an existing reload whose IN/OUT directly
     match ours (subject to class compatibility and mergeability).  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */

  /* Second pass: input-only reloads where one side is a plain REG and
     the other is an autoincrement of that same REG.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }

  /* No reusable reload found.  */
  return n_reloads;
}
841 | ||
d9d09ca2 EB |
842 | /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG |
843 | expression. MODE is the mode that X will be used in. OUTPUT is true if | |
844 | the function is invoked for the output part of an enclosing reload. */ | |
e6ea3b5f | 845 | |
d9d09ca2 | 846 | static bool |
ef4bddc2 | 847 | reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output) |
e6ea3b5f JL |
848 | { |
849 | rtx inner; | |
850 | ||
851 | /* Only SUBREGs are problematical. */ | |
852 | if (GET_CODE (x) != SUBREG) | |
d9d09ca2 | 853 | return false; |
e6ea3b5f JL |
854 | |
855 | inner = SUBREG_REG (x); | |
856 | ||
d9d09ca2 | 857 | /* If INNER is a constant or PLUS, then INNER will need reloading. */ |
35fb60c4 | 858 | if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS) |
d9d09ca2 | 859 | return true; |
e6ea3b5f | 860 | |
d9d09ca2 EB |
861 | /* If INNER is not a hard register, then INNER will not need reloading. */ |
862 | if (!(REG_P (inner) && HARD_REGISTER_P (inner))) | |
863 | return false; | |
e6ea3b5f JL |
864 | |
865 | /* If INNER is not ok for MODE, then INNER will need reloading. */ | |
d9d09ca2 EB |
866 | if (!HARD_REGNO_MODE_OK (subreg_regno (x), mode)) |
867 | return true; | |
868 | ||
869 | /* If this is for an output, and the outer part is a word or smaller, | |
870 | INNER is larger than a word and the number of registers in INNER is | |
871 | not the same as the number of words in INNER, then INNER will need | |
872 | reloading (with an in-out reload). */ | |
873 | return (output | |
874 | && GET_MODE_SIZE (mode) <= UNITS_PER_WORD | |
e6ea3b5f JL |
875 | && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD |
876 | && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD) | |
66fd46b6 | 877 | != (int) hard_regno_nregs[REGNO (inner)][GET_MODE (inner)])); |
e6ea3b5f JL |
878 | } |
879 | ||
35d6034b R |
880 | /* Return nonzero if IN can be reloaded into REGNO with mode MODE without |
881 | requiring an extra reload register. The caller has already found that | |
882 | IN contains some reference to REGNO, so check that we can produce the | |
883 | new value in a single step. E.g. if we have | |
884 | (set (reg r13) (plus (reg r13) (const int 1))), and there is an | |
885 | instruction that adds one to a register, this should succeed. | |
886 | However, if we have something like | |
887 | (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999 | |
888 | needs to be loaded into a register first, we need a separate reload | |
889 | register. | |
890 | Such PLUS reloads are generated by find_reload_address_part. | |
891 | The out-of-range PLUS expressions are usually introduced in the instruction | |
892 | patterns by register elimination and substituting pseudos without a home | |
893 | by their function-invariant equivalences. */ | |
894 | static int | |
ef4bddc2 | 895 | can_reload_into (rtx in, int regno, machine_mode mode) |
35d6034b | 896 | { |
647d790d DM |
897 | rtx dst; |
898 | rtx_insn *test_insn; | |
35d6034b | 899 | int r = 0; |
8da2e059 | 900 | struct recog_data_d save_recog_data; |
35d6034b R |
901 | |
902 | /* For matching constraints, we often get notional input reloads where | |
903 | we want to use the original register as the reload register. I.e. | |
904 | technically this is a non-optional input-output reload, but IN is | |
905 | already a valid register, and has been chosen as the reload register. | |
906 | Speed this up, since it trivially works. */ | |
f8cfc6aa | 907 | if (REG_P (in)) |
35d6034b R |
908 | return 1; |
909 | ||
910 | /* To test MEMs properly, we'd have to take into account all the reloads | |
911 | that are already scheduled, which can become quite complicated. | |
912 | And since we've already handled address reloads for this MEM, it | |
913 | should always succeed anyway. */ | |
3c0cb5de | 914 | if (MEM_P (in)) |
35d6034b R |
915 | return 1; |
916 | ||
917 | /* If we can make a simple SET insn that does the job, everything should | |
918 | be fine. */ | |
919 | dst = gen_rtx_REG (mode, regno); | |
920 | test_insn = make_insn_raw (gen_rtx_SET (VOIDmode, dst, in)); | |
921 | save_recog_data = recog_data; | |
922 | if (recog_memoized (test_insn) >= 0) | |
923 | { | |
924 | extract_insn (test_insn); | |
daca1a96 | 925 | r = constrain_operands (1, get_enabled_alternatives (test_insn)); |
35d6034b R |
926 | } |
927 | recog_data = save_recog_data; | |
928 | return r; | |
929 | } | |
930 | ||
a8c9daeb | 931 | /* Record one reload that needs to be performed. |
eab89b90 RK |
932 | IN is an rtx saying where the data are to be found before this instruction. |
933 | OUT says where they must be stored after the instruction. | |
934 | (IN is zero for data not read, and OUT is zero for data not written.) | |
935 | INLOC and OUTLOC point to the places in the instructions where | |
936 | IN and OUT were found. | |
40f03658 | 937 | If IN and OUT are both nonzero, it means the same register must be used |
a8c9daeb RK |
938 | to reload both IN and OUT. |
939 | ||
55d796da | 940 | RCLASS is a register class required for the reloaded data. |
eab89b90 RK |
941 | INMODE is the machine mode that the instruction requires |
942 | for the reg that replaces IN and OUTMODE is likewise for OUT. | |
943 | ||
944 | If IN is zero, then OUT's location and mode should be passed as | |
945 | INLOC and INMODE. | |
946 | ||
947 | STRICT_LOW is the 1 if there is a containing STRICT_LOW_PART rtx. | |
948 | ||
949 | OPTIONAL nonzero means this reload does not need to be performed: | |
950 | it can be discarded if that is more convenient. | |
951 | ||
a8c9daeb RK |
952 | OPNUM and TYPE say what the purpose of this reload is. |
953 | ||
eab89b90 RK |
954 | The return value is the reload-number for this reload. |
955 | ||
956 | If both IN and OUT are nonzero, in some rare cases we might | |
957 | want to make two separate reloads. (Actually we never do this now.) | |
958 | Therefore, the reload-number for OUT is stored in | |
959 | output_reloadnum when we return; the return value applies to IN. | |
960 | Usually (presently always), when IN and OUT are nonzero, | |
961 | the two reload-numbers are equal, but the caller should be careful to | |
962 | distinguish them. */ | |
963 | ||
aead1ca3 | 964 | int |
0c20a65f | 965 | push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc, |
ef4bddc2 RS |
966 | enum reg_class rclass, machine_mode inmode, |
967 | machine_mode outmode, int strict_low, int optional, | |
0c20a65f | 968 | int opnum, enum reload_type type) |
eab89b90 | 969 | { |
b3694847 | 970 | int i; |
eab89b90 | 971 | int dont_share = 0; |
74347d76 | 972 | int dont_remove_subreg = 0; |
de95483d | 973 | #ifdef LIMIT_RELOAD_CLASS |
eab89b90 | 974 | rtx *in_subreg_loc = 0, *out_subreg_loc = 0; |
de95483d | 975 | #endif |
9ec7078b | 976 | int secondary_in_reload = -1, secondary_out_reload = -1; |
a229128d RK |
977 | enum insn_code secondary_in_icode = CODE_FOR_nothing; |
978 | enum insn_code secondary_out_icode = CODE_FOR_nothing; | |
82ad0aaf BS |
979 | enum reg_class subreg_in_class ATTRIBUTE_UNUSED; |
980 | subreg_in_class = NO_REGS; | |
a8c9daeb | 981 | |
eab89b90 RK |
982 | /* INMODE and/or OUTMODE could be VOIDmode if no mode |
983 | has been specified for the operand. In that case, | |
984 | use the operand's mode as the mode to reload. */ | |
985 | if (inmode == VOIDmode && in != 0) | |
986 | inmode = GET_MODE (in); | |
987 | if (outmode == VOIDmode && out != 0) | |
988 | outmode = GET_MODE (out); | |
989 | ||
90d12f1f AK |
990 | /* If find_reloads and friends until now missed to replace a pseudo |
991 | with a constant of reg_equiv_constant something went wrong | |
992 | beforehand. | |
993 | Note that it can't simply be done here if we missed it earlier | |
994 | since the constant might need to be pushed into the literal pool | |
995 | and the resulting memref would probably need further | |
996 | reloading. */ | |
f8cfc6aa | 997 | if (in != 0 && REG_P (in)) |
eab89b90 | 998 | { |
b3694847 | 999 | int regno = REGNO (in); |
eab89b90 | 1000 | |
90d12f1f AK |
1001 | gcc_assert (regno < FIRST_PSEUDO_REGISTER |
1002 | || reg_renumber[regno] >= 0 | |
f2034d06 | 1003 | || reg_equiv_constant (regno) == NULL_RTX); |
eab89b90 RK |
1004 | } |
1005 | ||
90d12f1f AK |
1006 | /* reg_equiv_constant only contains constants which are obviously |
1007 | not appropriate as destination. So if we would need to replace | |
1008 | the destination pseudo with a constant we are in real | |
1009 | trouble. */ | |
f8cfc6aa | 1010 | if (out != 0 && REG_P (out)) |
eab89b90 | 1011 | { |
b3694847 | 1012 | int regno = REGNO (out); |
eab89b90 | 1013 | |
90d12f1f AK |
1014 | gcc_assert (regno < FIRST_PSEUDO_REGISTER |
1015 | || reg_renumber[regno] >= 0 | |
f2034d06 | 1016 | || reg_equiv_constant (regno) == NULL_RTX); |
eab89b90 RK |
1017 | } |
1018 | ||
1019 | /* If we have a read-write operand with an address side-effect, | |
1020 | change either IN or OUT so the side-effect happens only once. */ | |
3c0cb5de | 1021 | if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out)) |
f1ec5147 RK |
1022 | switch (GET_CODE (XEXP (in, 0))) |
1023 | { | |
1024 | case POST_INC: case POST_DEC: case POST_MODIFY: | |
1025 | in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0)); | |
1026 | break; | |
3bdf5ad1 | 1027 | |
f1ec5147 RK |
1028 | case PRE_INC: case PRE_DEC: case PRE_MODIFY: |
1029 | out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0)); | |
1030 | break; | |
3bdf5ad1 | 1031 | |
f1ec5147 RK |
1032 | default: |
1033 | break; | |
f4f4d0f8 | 1034 | } |
eab89b90 | 1035 | |
a61c98cf | 1036 | /* If we are reloading a (SUBREG constant ...), really reload just the |
ca769828 | 1037 | inside expression in its own mode. Similarly for (SUBREG (PLUS ...)). |
a61c98cf RK |
1038 | If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still |
1039 | a pseudo and hence will become a MEM) with M1 wider than M2 and the | |
1040 | register is a pseudo, also reload the inside expression. | |
f72ccbe6 | 1041 | For machines that extend byte loads, do this for any SUBREG of a pseudo |
486d8509 RK |
1042 | where both M1 and M2 are a word or smaller, M1 is wider than M2, and |
1043 | M2 is an integral mode that gets extended when loaded. | |
d9d09ca2 EB |
1044 | Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R |
1045 | where either M1 is not valid for R or M2 is wider than a word but we | |
1046 | only need one register to store an M2-sized quantity in R. | |
86c31b2d RS |
1047 | (However, if OUT is nonzero, we need to reload the reg *and* |
1048 | the subreg, so do nothing here, and let following statement handle it.) | |
1049 | ||
eab89b90 RK |
1050 | Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere; |
1051 | we can't handle it here because CONST_INT does not indicate a mode. | |
1052 | ||
1053 | Similarly, we must reload the inside expression if we have a | |
fa10beec | 1054 | STRICT_LOW_PART (presumably, in == out in this case). |
df62f951 RK |
1055 | |
1056 | Also reload the inner expression if it does not require a secondary | |
486d8509 RK |
1057 | reload but the SUBREG does. |
1058 | ||
1059 | Finally, reload the inner expression if it is a register that is in | |
1060 | the class whose registers cannot be referenced in a different size | |
b4905cba | 1061 | and M1 is not the same size as M2. If subreg_lowpart_p is false, we |
d030f4b2 | 1062 | cannot reload just the inside since we might end up with the wrong |
ab87f8c8 JL |
1063 | register class. But if it is inside a STRICT_LOW_PART, we have |
1064 | no choice, so we hope we do get the right register class there. */ | |
eab89b90 | 1065 | |
ab87f8c8 | 1066 | if (in != 0 && GET_CODE (in) == SUBREG |
b4905cba | 1067 | && (subreg_lowpart_p (in) || strict_low) |
cff9f8d5 | 1068 | #ifdef CANNOT_CHANGE_MODE_CLASS |
55d796da | 1069 | && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (in)), inmode, rclass) |
94bafba7 | 1070 | #endif |
4d41c2d1 | 1071 | && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (in))] |
a61c98cf | 1072 | && (CONSTANT_P (SUBREG_REG (in)) |
ca769828 | 1073 | || GET_CODE (SUBREG_REG (in)) == PLUS |
eab89b90 | 1074 | || strict_low |
f8cfc6aa | 1075 | || (((REG_P (SUBREG_REG (in)) |
a61c98cf | 1076 | && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER) |
3c0cb5de | 1077 | || MEM_P (SUBREG_REG (in))) |
6a4bdc79 BS |
1078 | && ((GET_MODE_PRECISION (inmode) |
1079 | > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in)))) | |
09bf0250 | 1080 | #ifdef LOAD_EXTEND_OP |
03b72c86 RK |
1081 | || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD |
1082 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) | |
1083 | <= UNITS_PER_WORD) | |
6a4bdc79 BS |
1084 | && (GET_MODE_PRECISION (inmode) |
1085 | > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in)))) | |
486d8509 | 1086 | && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (in))) |
f822d252 | 1087 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (in))) != UNKNOWN) |
d2c92f5a R |
1088 | #endif |
1089 | #ifdef WORD_REGISTER_OPERATIONS | |
6a4bdc79 BS |
1090 | || ((GET_MODE_PRECISION (inmode) |
1091 | < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (in)))) | |
d2c92f5a R |
1092 | && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD == |
1093 | ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1) | |
1094 | / UNITS_PER_WORD))) | |
f72ccbe6 | 1095 | #endif |
03b72c86 | 1096 | )) |
f8cfc6aa | 1097 | || (REG_P (SUBREG_REG (in)) |
a61c98cf | 1098 | && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER |
86c31b2d RS |
1099 | /* The case where out is nonzero |
1100 | is handled differently in the following statement. */ | |
b4905cba | 1101 | && (out == 0 || subreg_lowpart_p (in)) |
f72ccbe6 RK |
1102 | && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD |
1103 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) | |
1104 | > UNITS_PER_WORD) | |
1105 | && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) | |
1106 | / UNITS_PER_WORD) | |
66fd46b6 JH |
1107 | != (int) hard_regno_nregs[REGNO (SUBREG_REG (in))] |
1108 | [GET_MODE (SUBREG_REG (in))])) | |
ddef6bc7 | 1109 | || ! HARD_REGNO_MODE_OK (subreg_regno (in), inmode))) |
55d796da KG |
1110 | || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS |
1111 | && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)), | |
8a99f6f9 | 1112 | SUBREG_REG (in)) |
df62f951 | 1113 | == NO_REGS)) |
cff9f8d5 | 1114 | #ifdef CANNOT_CHANGE_MODE_CLASS |
f8cfc6aa | 1115 | || (REG_P (SUBREG_REG (in)) |
486d8509 | 1116 | && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER |
cff9f8d5 AH |
1117 | && REG_CANNOT_CHANGE_MODE_P |
1118 | (REGNO (SUBREG_REG (in)), GET_MODE (SUBREG_REG (in)), inmode)) | |
df62f951 RK |
1119 | #endif |
1120 | )) | |
eab89b90 | 1121 | { |
de95483d | 1122 | #ifdef LIMIT_RELOAD_CLASS |
eab89b90 | 1123 | in_subreg_loc = inloc; |
de95483d | 1124 | #endif |
eab89b90 RK |
1125 | inloc = &SUBREG_REG (in); |
1126 | in = *inloc; | |
d2c92f5a | 1127 | #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS) |
3c0cb5de | 1128 | if (MEM_P (in)) |
eab89b90 RK |
1129 | /* This is supposed to happen only for paradoxical subregs made by |
1130 | combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */ | |
41374e13 | 1131 | gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode)); |
e05a9da8 | 1132 | #endif |
eab89b90 RK |
1133 | inmode = GET_MODE (in); |
1134 | } | |
1135 | ||
d9d09ca2 EB |
1136 | /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R |
1137 | where M1 is not valid for R if it was not handled by the code above. | |
1138 | ||
1139 | Similar issue for (SUBREG constant ...) if it was not handled by the | |
1140 | code above. This can happen if SUBREG_BYTE != 0. | |
86c31b2d RS |
1141 | |
1142 | However, we must reload the inner reg *as well as* the subreg in | |
1143 | that case. */ | |
1144 | ||
d9d09ca2 | 1145 | if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false)) |
86c31b2d | 1146 | { |
f8cfc6aa | 1147 | if (REG_P (SUBREG_REG (in))) |
82ad0aaf | 1148 | subreg_in_class |
e11ab33b | 1149 | = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)), |
35fb60c4 RK |
1150 | subreg_regno_offset (REGNO (SUBREG_REG (in)), |
1151 | GET_MODE (SUBREG_REG (in)), | |
1152 | SUBREG_BYTE (in), | |
a65dc37d JH |
1153 | GET_MODE (in)), |
1154 | REGNO (SUBREG_REG (in))); | |
82ad0aaf BS |
1155 | else if (GET_CODE (SUBREG_REG (in)) == SYMBOL_REF) |
1156 | subreg_in_class = find_valid_class_1 (inmode, | |
1157 | GET_MODE (SUBREG_REG (in)), | |
1158 | rclass); | |
35fb60c4 | 1159 | |
c96d01ab RK |
1160 | /* This relies on the fact that emit_reload_insns outputs the |
1161 | instructions for input reloads of type RELOAD_OTHER in the same | |
1162 | order as the reloads. Thus if the outer reload is also of type | |
1163 | RELOAD_OTHER, we are guaranteed that this inner reload will be | |
1164 | output before the outer reload. */ | |
f4f4d0f8 | 1165 | push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0, |
82ad0aaf | 1166 | subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type); |
74347d76 | 1167 | dont_remove_subreg = 1; |
86c31b2d RS |
1168 | } |
1169 | ||
eab89b90 RK |
1170 | /* Similarly for paradoxical and problematical SUBREGs on the output. |
1171 | Note that there is no reason we need worry about the previous value | |
95d17cbf EB |
1172 | of SUBREG_REG (out); even if wider than out, storing in a subreg is |
1173 | entitled to clobber it all (except in the case of a word mode subreg | |
1174 | or of a STRICT_LOW_PART, in that latter case the constraint should | |
1175 | label it input-output.) */ | |
ab87f8c8 | 1176 | if (out != 0 && GET_CODE (out) == SUBREG |
b4905cba | 1177 | && (subreg_lowpart_p (out) || strict_low) |
cff9f8d5 | 1178 | #ifdef CANNOT_CHANGE_MODE_CLASS |
55d796da | 1179 | && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SUBREG_REG (out)), outmode, rclass) |
94bafba7 | 1180 | #endif |
4d41c2d1 | 1181 | && contains_reg_of_mode[(int) rclass][(int) GET_MODE (SUBREG_REG (out))] |
a61c98cf | 1182 | && (CONSTANT_P (SUBREG_REG (out)) |
eab89b90 | 1183 | || strict_low |
f8cfc6aa | 1184 | || (((REG_P (SUBREG_REG (out)) |
a61c98cf | 1185 | && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER) |
3c0cb5de | 1186 | || MEM_P (SUBREG_REG (out))) |
6a4bdc79 BS |
1187 | && ((GET_MODE_PRECISION (outmode) |
1188 | > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out)))) | |
1914f5da | 1189 | #ifdef WORD_REGISTER_OPERATIONS |
6a4bdc79 BS |
1190 | || ((GET_MODE_PRECISION (outmode) |
1191 | < GET_MODE_PRECISION (GET_MODE (SUBREG_REG (out)))) | |
6d49a073 JW |
1192 | && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD == |
1193 | ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1) | |
1194 | / UNITS_PER_WORD))) | |
1914f5da | 1195 | #endif |
05d10675 | 1196 | )) |
2286a26f EB |
1197 | || (REG_P (SUBREG_REG (out)) |
1198 | && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER | |
1199 | /* The case of a word mode subreg | |
1200 | is handled differently in the following statement. */ | |
1201 | && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD | |
1202 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) | |
1203 | > UNITS_PER_WORD)) | |
1204 | && ! HARD_REGNO_MODE_OK (subreg_regno (out), outmode)) | |
55d796da KG |
1205 | || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS |
1206 | && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)), | |
8a99f6f9 | 1207 | SUBREG_REG (out)) |
df62f951 | 1208 | == NO_REGS)) |
cff9f8d5 | 1209 | #ifdef CANNOT_CHANGE_MODE_CLASS |
f8cfc6aa | 1210 | || (REG_P (SUBREG_REG (out)) |
486d8509 | 1211 | && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER |
cff9f8d5 | 1212 | && REG_CANNOT_CHANGE_MODE_P (REGNO (SUBREG_REG (out)), |
0c20a65f | 1213 | GET_MODE (SUBREG_REG (out)), |
cff9f8d5 | 1214 | outmode)) |
df62f951 RK |
1215 | #endif |
1216 | )) | |
eab89b90 | 1217 | { |
de95483d | 1218 | #ifdef LIMIT_RELOAD_CLASS |
eab89b90 | 1219 | out_subreg_loc = outloc; |
de95483d | 1220 | #endif |
eab89b90 | 1221 | outloc = &SUBREG_REG (out); |
05d10675 | 1222 | out = *outloc; |
d2c92f5a | 1223 | #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS) |
41374e13 NS |
1224 | gcc_assert (!MEM_P (out) |
1225 | || GET_MODE_SIZE (GET_MODE (out)) | |
1226 | <= GET_MODE_SIZE (outmode)); | |
e05a9da8 | 1227 | #endif |
eab89b90 RK |
1228 | outmode = GET_MODE (out); |
1229 | } | |
1230 | ||
d9d09ca2 EB |
1231 | /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R |
1232 | where either M1 is not valid for R or M2 is wider than a word but we | |
1233 | only need one register to store an M2-sized quantity in R. | |
74347d76 RK |
1234 | |
1235 | However, we must reload the inner reg *as well as* the subreg in | |
d9d09ca2 | 1236 | that case and the inner reg is an in-out reload. */ |
74347d76 | 1237 | |
d9d09ca2 | 1238 | if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true)) |
74347d76 | 1239 | { |
d9d09ca2 EB |
1240 | enum reg_class in_out_class |
1241 | = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)), | |
1242 | subreg_regno_offset (REGNO (SUBREG_REG (out)), | |
1243 | GET_MODE (SUBREG_REG (out)), | |
1244 | SUBREG_BYTE (out), | |
1245 | GET_MODE (out)), | |
1246 | REGNO (SUBREG_REG (out))); | |
1247 | ||
c96d01ab RK |
1248 | /* This relies on the fact that emit_reload_insns outputs the |
1249 | instructions for output reloads of type RELOAD_OTHER in reverse | |
1250 | order of the reloads. Thus if the outer reload is also of type | |
1251 | RELOAD_OTHER, we are guaranteed that this inner reload will be | |
1252 | output after the outer reload. */ | |
74347d76 | 1253 | push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out), |
d9d09ca2 EB |
1254 | &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode, |
1255 | 0, 0, opnum, RELOAD_OTHER); | |
1256 | dont_remove_subreg = 1; | |
74347d76 RK |
1257 | } |
1258 | ||
eab89b90 | 1259 | /* If IN appears in OUT, we can't share any input-only reload for IN. */ |
3c0cb5de | 1260 | if (in != 0 && out != 0 && MEM_P (out) |
10050f74 | 1261 | && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS) |
bfa30b22 | 1262 | && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0))) |
eab89b90 RK |
1263 | dont_share = 1; |
1264 | ||
0dadecf6 RK |
1265 | /* If IN is a SUBREG of a hard register, make a new REG. This |
1266 | simplifies some of the cases below. */ | |
1267 | ||
f8cfc6aa | 1268 | if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in)) |
74347d76 RK |
1269 | && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER |
1270 | && ! dont_remove_subreg) | |
ddef6bc7 | 1271 | in = gen_rtx_REG (GET_MODE (in), subreg_regno (in)); |
0dadecf6 RK |
1272 | |
1273 | /* Similarly for OUT. */ | |
1274 | if (out != 0 && GET_CODE (out) == SUBREG | |
f8cfc6aa | 1275 | && REG_P (SUBREG_REG (out)) |
74347d76 RK |
1276 | && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER |
1277 | && ! dont_remove_subreg) | |
ddef6bc7 | 1278 | out = gen_rtx_REG (GET_MODE (out), subreg_regno (out)); |
0dadecf6 | 1279 | |
eab89b90 RK |
1280 | /* Narrow down the class of register wanted if that is |
1281 | desirable on this machine for efficiency. */ | |
b5c82fa1 | 1282 | { |
abd26bfb | 1283 | reg_class_t preferred_class = rclass; |
b5c82fa1 PB |
1284 | |
1285 | if (in != 0) | |
abd26bfb | 1286 | preferred_class = targetm.preferred_reload_class (in, rclass); |
eab89b90 | 1287 | |
abd26bfb | 1288 | /* Output reloads may need analogous treatment, different in detail. */ |
b5c82fa1 | 1289 | if (out != 0) |
abd26bfb AS |
1290 | preferred_class |
1291 | = targetm.preferred_output_reload_class (out, preferred_class); | |
18a53b78 | 1292 | |
b5c82fa1 PB |
1293 | /* Discard what the target said if we cannot do it. */ |
1294 | if (preferred_class != NO_REGS | |
1295 | || (optional && type == RELOAD_FOR_OUTPUT)) | |
abd26bfb | 1296 | rclass = (enum reg_class) preferred_class; |
b5c82fa1 PB |
1297 | } |
1298 | ||
eab89b90 RK |
1299 | /* Make sure we use a class that can handle the actual pseudo |
1300 | inside any subreg. For example, on the 386, QImode regs | |
1301 | can appear within SImode subregs. Although GENERAL_REGS | |
1302 | can handle SImode, QImode needs a smaller class. */ | |
1303 | #ifdef LIMIT_RELOAD_CLASS | |
1304 | if (in_subreg_loc) | |
55d796da | 1305 | rclass = LIMIT_RELOAD_CLASS (inmode, rclass); |
eab89b90 | 1306 | else if (in != 0 && GET_CODE (in) == SUBREG) |
55d796da | 1307 | rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass); |
eab89b90 RK |
1308 | |
1309 | if (out_subreg_loc) | |
55d796da | 1310 | rclass = LIMIT_RELOAD_CLASS (outmode, rclass); |
eab89b90 | 1311 | if (out != 0 && GET_CODE (out) == SUBREG) |
55d796da | 1312 | rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass); |
eab89b90 RK |
1313 | #endif |
1314 | ||
eab89b90 RK |
1315 | /* Verify that this class is at least possible for the mode that |
1316 | is specified. */ | |
1317 | if (this_insn_is_asm) | |
1318 | { | |
ef4bddc2 | 1319 | machine_mode mode; |
eab89b90 RK |
1320 | if (GET_MODE_SIZE (inmode) > GET_MODE_SIZE (outmode)) |
1321 | mode = inmode; | |
1322 | else | |
1323 | mode = outmode; | |
5488078f RS |
1324 | if (mode == VOIDmode) |
1325 | { | |
971801ff JM |
1326 | error_for_asm (this_insn, "cannot reload integer constant " |
1327 | "operand in %<asm%>"); | |
5488078f RS |
1328 | mode = word_mode; |
1329 | if (in != 0) | |
1330 | inmode = word_mode; | |
1331 | if (out != 0) | |
1332 | outmode = word_mode; | |
1333 | } | |
eab89b90 RK |
1334 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1335 | if (HARD_REGNO_MODE_OK (i, mode) | |
55d796da | 1336 | && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i)) |
09e18274 | 1337 | break; |
eab89b90 RK |
1338 | if (i == FIRST_PSEUDO_REGISTER) |
1339 | { | |
971801ff JM |
1340 | error_for_asm (this_insn, "impossible register constraint " |
1341 | "in %<asm%>"); | |
71156bcc JH |
1342 | /* Avoid further trouble with this insn. */ |
1343 | PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx); | |
1344 | /* We used to continue here setting class to ALL_REGS, but it triggers | |
1345 | sanity check on i386 for: | |
1346 | void foo(long double d) | |
1347 | { | |
1348 | asm("" :: "a" (d)); | |
1349 | } | |
1350 | Returning zero here ought to be safe as we take care in | |
1351 | find_reloads to not process the reloads when instruction was | |
1352 | replaced by USE. */ | |
b8698a0f | 1353 | |
71156bcc | 1354 | return 0; |
eab89b90 RK |
1355 | } |
1356 | } | |
1357 | ||
cb2afeb3 R |
1358 | /* Optional output reloads are always OK even if we have no register class, |
1359 | since the function of these reloads is only to have spill_reg_store etc. | |
1360 | set, so that the storing insn can be deleted later. */ | |
55d796da | 1361 | gcc_assert (rclass != NO_REGS |
41374e13 | 1362 | || (optional != 0 && type == RELOAD_FOR_OUTPUT)); |
5488078f | 1363 | |
55d796da | 1364 | i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share); |
eab89b90 RK |
1365 | |
1366 | if (i == n_reloads) | |
1367 | { | |
9ec7078b RK |
1368 | /* See if we need a secondary reload register to move between CLASS |
1369 | and IN or CLASS and OUT. Get the icode and push any required reloads | |
1370 | needed for each of them if so. */ | |
eab89b90 | 1371 | |
eab89b90 | 1372 | if (in != 0) |
9ec7078b | 1373 | secondary_in_reload |
55d796da | 1374 | = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type, |
8a99f6f9 | 1375 | &secondary_in_icode, NULL); |
eab89b90 | 1376 | if (out != 0 && GET_CODE (out) != SCRATCH) |
9ec7078b | 1377 | secondary_out_reload |
55d796da | 1378 | = push_secondary_reload (0, out, opnum, optional, rclass, outmode, |
8a99f6f9 | 1379 | type, &secondary_out_icode, NULL); |
eab89b90 RK |
1380 | |
1381 | /* We found no existing reload suitable for re-use. | |
1382 | So add an additional reload. */ | |
1383 | ||
e9a25f70 | 1384 | #ifdef SECONDARY_MEMORY_NEEDED |
82ad0aaf BS |
1385 | if (subreg_in_class == NO_REGS |
1386 | && in != 0 | |
71a9b19a RG |
1387 | && (REG_P (in) |
1388 | || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in)))) | |
82ad0aaf BS |
1389 | && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER) |
1390 | subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in)); | |
1391 | /* If a memory location is needed for the copy, make one. */ | |
1392 | if (subreg_in_class != NO_REGS | |
1393 | && SECONDARY_MEMORY_NEEDED (subreg_in_class, rclass, inmode)) | |
e9a25f70 JL |
1394 | get_secondary_mem (in, inmode, opnum, type); |
1395 | #endif | |
1396 | ||
9ec7078b | 1397 | i = n_reloads; |
eceef4c9 BS |
1398 | rld[i].in = in; |
1399 | rld[i].out = out; | |
48c54229 | 1400 | rld[i].rclass = rclass; |
eceef4c9 BS |
1401 | rld[i].inmode = inmode; |
1402 | rld[i].outmode = outmode; | |
1403 | rld[i].reg_rtx = 0; | |
1404 | rld[i].optional = optional; | |
eceef4c9 BS |
1405 | rld[i].inc = 0; |
1406 | rld[i].nocombine = 0; | |
1407 | rld[i].in_reg = inloc ? *inloc : 0; | |
1408 | rld[i].out_reg = outloc ? *outloc : 0; | |
1409 | rld[i].opnum = opnum; | |
1410 | rld[i].when_needed = type; | |
1411 | rld[i].secondary_in_reload = secondary_in_reload; | |
1412 | rld[i].secondary_out_reload = secondary_out_reload; | |
1413 | rld[i].secondary_in_icode = secondary_in_icode; | |
1414 | rld[i].secondary_out_icode = secondary_out_icode; | |
1415 | rld[i].secondary_p = 0; | |
eab89b90 RK |
1416 | |
1417 | n_reloads++; | |
0dadecf6 RK |
1418 | |
1419 | #ifdef SECONDARY_MEMORY_NEEDED | |
71a9b19a RG |
1420 | if (out != 0 |
1421 | && (REG_P (out) | |
1422 | || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out)))) | |
344b78b8 | 1423 | && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER |
55d796da | 1424 | && SECONDARY_MEMORY_NEEDED (rclass, |
344b78b8 | 1425 | REGNO_REG_CLASS (reg_or_subregno (out)), |
0dadecf6 | 1426 | outmode)) |
a8c9daeb | 1427 | get_secondary_mem (out, outmode, opnum, type); |
0dadecf6 | 1428 | #endif |
eab89b90 RK |
1429 | } |
1430 | else | |
1431 | { | |
1432 | /* We are reusing an existing reload, | |
1433 | but we may have additional information for it. | |
1434 | For example, we may now have both IN and OUT | |
1435 | while the old one may have just one of them. */ | |
1436 | ||
6fd5ac08 JW |
1437 | /* The modes can be different. If they are, we want to reload in |
1438 | the larger mode, so that the value is valid for both modes. */ | |
1439 | if (inmode != VOIDmode | |
eceef4c9 BS |
1440 | && GET_MODE_SIZE (inmode) > GET_MODE_SIZE (rld[i].inmode)) |
1441 | rld[i].inmode = inmode; | |
6fd5ac08 | 1442 | if (outmode != VOIDmode |
eceef4c9 BS |
1443 | && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (rld[i].outmode)) |
1444 | rld[i].outmode = outmode; | |
eab89b90 | 1445 | if (in != 0) |
cb2afeb3 | 1446 | { |
506b3b3a | 1447 | rtx in_reg = inloc ? *inloc : 0; |
cb2afeb3 R |
1448 | /* If we merge reloads for two distinct rtl expressions that |
1449 | are identical in content, there might be duplicate address | |
1450 | reloads. Remove the extra set now, so that if we later find | |
1451 | that we can inherit this reload, we can get rid of the | |
b838974e JL |
1452 | address reloads altogether. |
1453 | ||
1454 | Do not do this if both reloads are optional since the result | |
1455 | would be an optional reload which could potentially leave | |
1456 | unresolved address replacements. | |
1457 | ||
1458 | It is not sufficient to call transfer_replacements since | |
1459 | choose_reload_regs will remove the replacements for address | |
1460 | reloads of inherited reloads which results in the same | |
1461 | problem. */ | |
eceef4c9 BS |
1462 | if (rld[i].in != in && rtx_equal_p (in, rld[i].in) |
1463 | && ! (rld[i].optional && optional)) | |
cb2afeb3 R |
1464 | { |
1465 | /* We must keep the address reload with the lower operand | |
1466 | number alive. */ | |
eceef4c9 | 1467 | if (opnum > rld[i].opnum) |
cb2afeb3 R |
1468 | { |
1469 | remove_address_replacements (in); | |
eceef4c9 BS |
1470 | in = rld[i].in; |
1471 | in_reg = rld[i].in_reg; | |
cb2afeb3 R |
1472 | } |
1473 | else | |
eceef4c9 | 1474 | remove_address_replacements (rld[i].in); |
cb2afeb3 | 1475 | } |
46662f25 MM |
1476 | /* When emitting reloads we don't necessarily look at the in- |
1477 | and outmode, but also directly at the operands (in and out). | |
1478 | So we can't simply overwrite them with whatever we have found | |
1479 | for this (to-be-merged) reload, we have to "merge" that too. | |
1480 | Reusing another reload already verified that we deal with the | |
1481 | same operands, just possibly in different modes. So we | |
1482 | overwrite the operands only when the new mode is larger. | |
1483 | See also PR33613. */ | |
1484 | if (!rld[i].in | |
1485 | || GET_MODE_SIZE (GET_MODE (in)) | |
1486 | > GET_MODE_SIZE (GET_MODE (rld[i].in))) | |
1487 | rld[i].in = in; | |
1488 | if (!rld[i].in_reg | |
1489 | || (in_reg | |
1490 | && GET_MODE_SIZE (GET_MODE (in_reg)) | |
1491 | > GET_MODE_SIZE (GET_MODE (rld[i].in_reg)))) | |
1492 | rld[i].in_reg = in_reg; | |
cb2afeb3 | 1493 | } |
eab89b90 | 1494 | if (out != 0) |
cb2afeb3 | 1495 | { |
46662f25 MM |
1496 | if (!rld[i].out |
1497 | || (out | |
1498 | && GET_MODE_SIZE (GET_MODE (out)) | |
1499 | > GET_MODE_SIZE (GET_MODE (rld[i].out)))) | |
1500 | rld[i].out = out; | |
1501 | if (outloc | |
1502 | && (!rld[i].out_reg | |
1503 | || GET_MODE_SIZE (GET_MODE (*outloc)) | |
1504 | > GET_MODE_SIZE (GET_MODE (rld[i].out_reg)))) | |
1505 | rld[i].out_reg = *outloc; | |
cb2afeb3 | 1506 | } |
48c54229 KG |
1507 | if (reg_class_subset_p (rclass, rld[i].rclass)) |
1508 | rld[i].rclass = rclass; | |
eceef4c9 BS |
1509 | rld[i].optional &= optional; |
1510 | if (MERGE_TO_OTHER (type, rld[i].when_needed, | |
1511 | opnum, rld[i].opnum)) | |
1512 | rld[i].when_needed = RELOAD_OTHER; | |
1513 | rld[i].opnum = MIN (rld[i].opnum, opnum); | |
eab89b90 RK |
1514 | } |
1515 | ||
e0120d6e | 1516 | /* If the ostensible rtx being reloaded differs from the rtx found |
eab89b90 RK |
1517 | in the location to substitute, this reload is not safe to combine |
1518 | because we cannot reliably tell whether it appears in the insn. */ | |
1519 | ||
1520 | if (in != 0 && in != *inloc) | |
eceef4c9 | 1521 | rld[i].nocombine = 1; |
eab89b90 RK |
1522 | |
1523 | #if 0 | |
1524 | /* This was replaced by changes in find_reloads_address_1 and the new | |
1525 | function inc_for_reload, which go with a new meaning of reload_inc. */ | |
1526 | ||
1527 | /* If this is an IN/OUT reload in an insn that sets the CC, | |
1528 | it must be for an autoincrement. It doesn't work to store | |
1529 | the incremented value after the insn because that would clobber the CC. | |
1530 | So we must do the increment of the value reloaded from, | |
1531 | increment it, store it back, then decrement again. */ | |
1532 | if (out != 0 && sets_cc0_p (PATTERN (this_insn))) | |
1533 | { | |
1534 | out = 0; | |
eceef4c9 BS |
1535 | rld[i].out = 0; |
1536 | rld[i].inc = find_inc_amount (PATTERN (this_insn), in); | |
eab89b90 RK |
1537 | /* If we did not find a nonzero amount-to-increment-by, |
1538 | that contradicts the belief that IN is being incremented | |
1539 | in an address in this insn. */ | |
41374e13 | 1540 | gcc_assert (rld[i].inc != 0); |
eab89b90 RK |
1541 | } |
1542 | #endif | |
1543 | ||
1544 | /* If we will replace IN and OUT with the reload-reg, | |
1545 | record where they are located so that substitution need | |
1546 | not do a tree walk. */ | |
1547 | ||
1548 | if (replace_reloads) | |
1549 | { | |
1550 | if (inloc != 0) | |
1551 | { | |
b3694847 | 1552 | struct replacement *r = &replacements[n_replacements++]; |
eab89b90 | 1553 | r->what = i; |
eab89b90 RK |
1554 | r->where = inloc; |
1555 | r->mode = inmode; | |
1556 | } | |
1557 | if (outloc != 0 && outloc != inloc) | |
1558 | { | |
b3694847 | 1559 | struct replacement *r = &replacements[n_replacements++]; |
eab89b90 RK |
1560 | r->what = i; |
1561 | r->where = outloc; | |
eab89b90 RK |
1562 | r->mode = outmode; |
1563 | } | |
1564 | } | |
1565 | ||
1566 | /* If this reload is just being introduced and it has both | |
1567 | an incoming quantity and an outgoing quantity that are | |
1568 | supposed to be made to match, see if either one of the two | |
1569 | can serve as the place to reload into. | |
1570 | ||
eceef4c9 | 1571 | If one of them is acceptable, set rld[i].reg_rtx |
eab89b90 RK |
1572 | to that one. */ |
1573 | ||
eceef4c9 | 1574 | if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0) |
eab89b90 | 1575 | { |
eceef4c9 BS |
1576 | rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc, |
1577 | inmode, outmode, | |
48c54229 | 1578 | rld[i].rclass, i, |
eceef4c9 | 1579 | earlyclobber_operand_p (out)); |
eab89b90 RK |
1580 | |
1581 | /* If the outgoing register already contains the same value | |
1582 | as the incoming one, we can dispense with loading it. | |
1583 | The easiest way to tell the caller that is to give a phony | |
1584 | value for the incoming operand (same as outgoing one). */ | |
eceef4c9 | 1585 | if (rld[i].reg_rtx == out |
f8cfc6aa | 1586 | && (REG_P (in) || CONSTANT_P (in)) |
bbbbb16a | 1587 | && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out), |
eab89b90 | 1588 | static_reload_reg_p, i, inmode)) |
eceef4c9 | 1589 | rld[i].in = out; |
eab89b90 RK |
1590 | } |
1591 | ||
1592 | /* If this is an input reload and the operand contains a register that | |
1593 | dies in this insn and is used nowhere else, see if it is the right class | |
1594 | to be used for this reload. Use it if so. (This occurs most commonly | |
1595 | in the case of paradoxical SUBREGs and in-out reloads). We cannot do | |
1596 | this if it is also an output reload that mentions the register unless | |
1597 | the output is a SUBREG that clobbers an entire register. | |
1598 | ||
1599 | Note that the operand might be one of the spill regs, if it is a | |
1600 | pseudo reg and we are in a block where spilling has not taken place. | |
1601 | But if there is no spilling in this block, that is OK. | |
1602 | An explicitly used hard reg cannot be a spill reg. */ | |
1603 | ||
687b527d | 1604 | if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known) |
eab89b90 RK |
1605 | { |
1606 | rtx note; | |
1607 | int regno; | |
ef4bddc2 | 1608 | machine_mode rel_mode = inmode; |
d0b6af71 R |
1609 | |
1610 | if (out && GET_MODE_SIZE (outmode) > GET_MODE_SIZE (inmode)) | |
1611 | rel_mode = outmode; | |
eab89b90 RK |
1612 | |
1613 | for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1)) | |
1614 | if (REG_NOTE_KIND (note) == REG_DEAD | |
f8cfc6aa | 1615 | && REG_P (XEXP (note, 0)) |
eab89b90 RK |
1616 | && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER |
1617 | && reg_mentioned_p (XEXP (note, 0), in) | |
96cdfb52 | 1618 | /* Check that a former pseudo is valid; see find_dummy_reload. */ |
687b527d | 1619 | && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER |
fefa31b5 | 1620 | || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)), |
058e97ec | 1621 | ORIGINAL_REGNO (XEXP (note, 0))) |
96cdfb52 | 1622 | && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)) |
eab89b90 | 1623 | && ! refers_to_regno_for_reload_p (regno, |
09e18274 RS |
1624 | end_hard_regno (rel_mode, |
1625 | regno), | |
eab89b90 | 1626 | PATTERN (this_insn), inloc) |
05b4ec4f RS |
1627 | /* If this is also an output reload, IN cannot be used as |
1628 | the reload register if it is set in this insn unless IN | |
1629 | is also OUT. */ | |
1630 | && (out == 0 || in == out | |
1631 | || ! hard_reg_set_here_p (regno, | |
09e18274 | 1632 | end_hard_regno (rel_mode, regno), |
05b4ec4f RS |
1633 | PATTERN (this_insn))) |
1634 | /* ??? Why is this code so different from the previous? | |
1635 | Is there any simple coherent way to describe the two together? | |
1636 | What's going on here. */ | |
eab89b90 RK |
1637 | && (in != out |
1638 | || (GET_CODE (in) == SUBREG | |
1639 | && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1)) | |
1640 | / UNITS_PER_WORD) | |
1641 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) | |
1642 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))) | |
1643 | /* Make sure the operand fits in the reg that dies. */ | |
d0b6af71 R |
1644 | && (GET_MODE_SIZE (rel_mode) |
1645 | <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))) | |
eab89b90 | 1646 | && HARD_REGNO_MODE_OK (regno, inmode) |
96b42f4c | 1647 | && HARD_REGNO_MODE_OK (regno, outmode)) |
eab89b90 | 1648 | { |
db30db99 | 1649 | unsigned int offs; |
66fd46b6 JH |
1650 | unsigned int nregs = MAX (hard_regno_nregs[regno][inmode], |
1651 | hard_regno_nregs[regno][outmode]); | |
db30db99 | 1652 | |
96b42f4c BS |
1653 | for (offs = 0; offs < nregs; offs++) |
1654 | if (fixed_regs[regno + offs] | |
55d796da | 1655 | || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass], |
96b42f4c BS |
1656 | regno + offs)) |
1657 | break; | |
1658 | ||
35d6034b R |
1659 | if (offs == nregs |
1660 | && (! (refers_to_regno_for_reload_p | |
09e18274 | 1661 | (regno, end_hard_regno (inmode, regno), in, (rtx *) 0)) |
35d6034b | 1662 | || can_reload_into (in, regno, inmode))) |
96b42f4c | 1663 | { |
d0b6af71 | 1664 | rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno); |
96b42f4c BS |
1665 | break; |
1666 | } | |
eab89b90 RK |
1667 | } |
1668 | } | |
1669 | ||
1670 | if (out) | |
1671 | output_reloadnum = i; | |
1672 | ||
1673 | return i; | |
1674 | } | |
1675 | ||
1676 | /* Record an additional place we must replace a value | |
1677 | for which we have already recorded a reload. | |
1678 | RELOADNUM is the value returned by push_reload | |
1679 | when the reload was recorded. | |
1680 | This is used in insn patterns that use match_dup. */ | |
1681 | ||
1682 | static void | |
ef4bddc2 | 1683 | push_replacement (rtx *loc, int reloadnum, machine_mode mode) |
eab89b90 RK |
1684 | { |
1685 | if (replace_reloads) | |
1686 | { | |
b3694847 | 1687 | struct replacement *r = &replacements[n_replacements++]; |
eab89b90 RK |
1688 | r->what = reloadnum; |
1689 | r->where = loc; | |
eab89b90 RK |
1690 | r->mode = mode; |
1691 | } | |
1692 | } | |
6cabe79e UW |
1693 | |
1694 | /* Duplicate any replacement we have recorded to apply at | |
1695 | location ORIG_LOC to also be performed at DUP_LOC. | |
1696 | This is used in insn patterns that use match_dup. */ | |
1697 | ||
1698 | static void | |
0c20a65f | 1699 | dup_replacements (rtx *dup_loc, rtx *orig_loc) |
6cabe79e UW |
1700 | { |
1701 | int i, n = n_replacements; | |
1702 | ||
1703 | for (i = 0; i < n; i++) | |
1704 | { | |
1705 | struct replacement *r = &replacements[i]; | |
1706 | if (r->where == orig_loc) | |
1707 | push_replacement (dup_loc, r->what, r->mode); | |
1708 | } | |
1709 | } | |
eab89b90 | 1710 | \f |
a8c9daeb RK |
1711 | /* Transfer all replacements that used to be in reload FROM to be in |
1712 | reload TO. */ | |
1713 | ||
1714 | void | |
0c20a65f | 1715 | transfer_replacements (int to, int from) |
a8c9daeb RK |
1716 | { |
1717 | int i; | |
1718 | ||
1719 | for (i = 0; i < n_replacements; i++) | |
1720 | if (replacements[i].what == from) | |
1721 | replacements[i].what = to; | |
1722 | } | |
1723 | \f | |
cb2afeb3 R |
1724 | /* IN_RTX is the value loaded by a reload that we now decided to inherit, |
1725 | or a subpart of it. If we have any replacements registered for IN_RTX, | |
1726 | cancel the reloads that were supposed to load them. | |
40f03658 | 1727 | Return nonzero if we canceled any reloads. */ |
int
remove_address_replacements (rtx in_rtx)
{
  int i, j;
  char reload_flags[MAX_RELOADS];
  int something_changed = 0;

  memset (reload_flags, 0, sizeof reload_flags);
  /* First pass: partition the replacement list.  Replacements that
     apply inside IN_RTX are dropped and their reload gets bit 1 set;
     all other replacements are kept (compacted toward the front) and
     their reload gets bit 2 set.  */
  for (i = 0, j = 0; i < n_replacements; i++)
    {
      if (loc_mentioned_in_p (replacements[i].where, in_rtx))
	reload_flags[replacements[i].what] |= 1;
      else
	{
	  replacements[j++] = replacements[i];
	  reload_flags[replacements[i].what] |= 2;
	}
    }
  /* Note that the following store must be done before the recursive calls.  */
  n_replacements = j;

  /* Second pass: a reload whose flags are exactly 1 had replacements
     only inside IN_RTX and kept none, so it is now dead — release its
     register and recursively cancel the reloads feeding its input.  */
  for (i = n_reloads - 1; i >= 0; i--)
    {
      if (reload_flags[i] == 1)
	{
	  deallocate_reload_reg (i);
	  remove_address_replacements (rld[i].in);
	  rld[i].in = 0;
	  something_changed = 1;
	}
    }
  return something_changed;
}
029b38ff | 1761 | \f |
eab89b90 RK |
1762 | /* If there is only one output reload, and it is not for an earlyclobber |
1763 | operand, try to combine it with a (logically unrelated) input reload | |
1764 | to reduce the number of reload registers needed. | |
1765 | ||
1766 | This is safe if the input reload does not appear in | |
1767 | the value being output-reloaded, because this implies | |
1768 | it is not needed any more once the original insn completes. | |
1769 | ||
1770 | If that doesn't work, see if we can use any of the registers that | |
1771 | die in this insn as a reload register. We can if it is of the right | |
1772 | class and does not appear in the value being output-reloaded. */ | |
1773 | ||
static void
combine_reloads (void)
{
  int i, regno;
  int output_reload = -1;
  int secondary_out = -1;
  rtx note;

  /* Find the output reload; return unless there is exactly one
     and that one is mandatory.  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].out != 0)
      {
	if (output_reload >= 0)
	  return;
	output_reload = i;
      }

  if (output_reload < 0 || rld[output_reload].optional)
    return;

  /* An input-output reload isn't combinable.  */

  if (rld[output_reload].in != 0)
    return;

  /* If this reload is for an earlyclobber operand, we can't do anything.  */
  if (earlyclobber_operand_p (rld[output_reload].out))
    return;

  /* If there is a reload for part of the address of this operand, we would
     need to change it to RELOAD_FOR_OTHER_ADDRESS.  But that would extend
     its life to the point where doing this combine would not lower the
     number of spill registers needed.  */
  for (i = 0; i < n_reloads; i++)
    if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
	 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
	&& rld[i].opnum == rld[output_reload].opnum)
      return;

  /* Check each input reload; can we combine it?  */

  for (i = 0; i < n_reloads; i++)
    if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
	/* Life span of this reload must not extend past main insn.  */
	&& rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
	&& rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
	&& rld[i].when_needed != RELOAD_OTHER
	/* Both reloads must need the same number of hard registers.  */
	&& (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
	    == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
	       [(int) rld[output_reload].outmode])
	&& rld[i].inc == 0
	&& rld[i].reg_rtx == 0
#ifdef SECONDARY_MEMORY_NEEDED
	/* Don't combine two reloads with different secondary
	   memory locations.  */
	&& (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
	    || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
	    || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
			    secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
#endif
	/* The register classes must be compatible: identical when the
	   target has small register classes, otherwise one must contain
	   the other.  */
	&& (targetm.small_register_classes_for_mode_p (VOIDmode)
	    ? (rld[i].rclass == rld[output_reload].rclass)
	    : (reg_class_subset_p (rld[i].rclass,
				   rld[output_reload].rclass)
	       || reg_class_subset_p (rld[output_reload].rclass,
				      rld[i].rclass)))
	&& (MATCHES (rld[i].in, rld[output_reload].out)
	    /* Args reversed because the first arg seems to be
	       the one that we imagine being modified
	       while the second is the one that might be affected.  */
	    || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
						      rld[i].in)
		/* However, if the input is a register that appears inside
		   the output, then we also can't share.
		   Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
		   If the same reload reg is used for both reg 69 and the
		   result to be stored in memory, then that result
		   will clobber the address of the memory ref.  */
		&& ! (REG_P (rld[i].in)
		      && reg_overlap_mentioned_for_reload_p (rld[i].in,
							     rld[output_reload].out))))
	&& ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
					 rld[i].when_needed != RELOAD_FOR_INPUT)
	&& (reg_class_size[(int) rld[i].rclass]
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	/* We will allow making things slightly worse by combining an
	   input and an output, but no worse than that.  */
	&& (rld[i].when_needed == RELOAD_FOR_INPUT
	    || rld[i].when_needed == RELOAD_FOR_OUTPUT))
      {
	int j;

	/* We have found a reload to combine with!  */
	rld[i].out = rld[output_reload].out;
	rld[i].out_reg = rld[output_reload].out_reg;
	rld[i].outmode = rld[output_reload].outmode;
	/* Mark the old output reload as inoperative.  */
	rld[output_reload].out = 0;
	/* The combined reload is needed for the entire insn.  */
	rld[i].when_needed = RELOAD_OTHER;
	/* If the output reload had a secondary reload, copy it.  */
	if (rld[output_reload].secondary_out_reload != -1)
	  {
	    rld[i].secondary_out_reload
	      = rld[output_reload].secondary_out_reload;
	    rld[i].secondary_out_icode
	      = rld[output_reload].secondary_out_icode;
	  }

#ifdef SECONDARY_MEMORY_NEEDED
	/* Copy any secondary MEM.  */
	if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
	  secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
	    = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
#endif
	/* If required, minimize the register class.  */
	if (reg_class_subset_p (rld[output_reload].rclass,
				rld[i].rclass))
	  rld[i].rclass = rld[output_reload].rclass;

	/* Transfer all replacements from the old reload to the combined.  */
	for (j = 0; j < n_replacements; j++)
	  if (replacements[j].what == output_reload)
	    replacements[j].what = i;

	return;
      }

  /* If this insn has only one operand that is modified or written (assumed
     to be the first), it must be the one corresponding to this reload.  It
     is safe to use anything that dies in this insn for that output provided
     that it does not occur in the output (we already know it isn't an
     earlyclobber).  If this is an asm insn, give up.  */

  if (INSN_CODE (this_insn) == -1)
    return;

  for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
    if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
	|| insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
      return;

  /* See if some hard register that dies in this insn and is not used in
     the output is the right class.  Only works if the register we pick
     up can fully hold our output reload.  */
  for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_DEAD
	&& REG_P (XEXP (note, 0))
	&& !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
						rld[output_reload].out)
	&& (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (regno, rld[output_reload].outmode)
	&& TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
			      regno)
	&& (hard_regno_nregs[regno][rld[output_reload].outmode]
	    <= hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))])
	/* Ensure that a secondary or tertiary reload for this output
	   won't want this register.  */
	&& ((secondary_out = rld[output_reload].secondary_out_reload) == -1
	    || (!(TEST_HARD_REG_BIT
		  (reg_class_contents[(int) rld[secondary_out].rclass], regno))
		&& ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
		    || !(TEST_HARD_REG_BIT
			 (reg_class_contents[(int) rld[secondary_out].rclass],
			  regno)))))
	&& !fixed_regs[regno]
	/* Check that a former pseudo is valid; see find_dummy_reload.  */
	&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
	    || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			       ORIGINAL_REGNO (XEXP (note, 0)))
		&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
      {
	rld[output_reload].reg_rtx
	  = gen_rtx_REG (rld[output_reload].outmode, regno);
	return;
      }
}
1953 | \f | |
/* Try to find a reload register for an in-out reload (expressions IN and OUT).
   See if one of IN and OUT is a register that may be used;
   this is desirable since a spill-register won't be needed.
   If so, return the register rtx that proves acceptable.
   Return 0 if neither IN nor OUT can serve as the reload register.

   INLOC and OUTLOC are locations where IN and OUT appear in the insn.
   RCLASS is the register class required for the reload.

   If FOR_REAL is >= 0, it is the number of the reload,
   and in some cases when it can be discovered that OUT doesn't need
   to be computed, clear out rld[FOR_REAL].out.

   If FOR_REAL is -1, this should not be done, because this call
   is just to see if a register can be found, not to find and install it.

   EARLYCLOBBER is nonzero if OUT is an earlyclobber operand.  This
   puts an additional constraint on being able to use IN for OUT since
   IN must not appear elsewhere in the insn (it is assumed that IN itself
   is safe from the earlyclobber).  */

static rtx
find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
		   machine_mode inmode, machine_mode outmode,
		   reg_class_t rclass, int for_real, int earlyclobber)
{
  rtx in = real_in;
  rtx out = real_out;
  int in_offset = 0;
  int out_offset = 0;
  rtx value = 0;

  /* If operands exceed a word, we can't use either of them
     unless they have the same size.  */
  if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
      && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
	  || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
    return 0;

  /* Note that {in,out}_offset are needed only when 'in' or 'out'
     respectively refers to a hard register.  */

  /* Find the inside of any subregs, accumulating the hard-register
     offset each subreg layer contributes.  */
  while (GET_CODE (out) == SUBREG)
    {
      if (REG_P (SUBREG_REG (out))
	  && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
	out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
					   GET_MODE (SUBREG_REG (out)),
					   SUBREG_BYTE (out),
					   GET_MODE (out));
      out = SUBREG_REG (out);
    }
  while (GET_CODE (in) == SUBREG)
    {
      if (REG_P (SUBREG_REG (in))
	  && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
	in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
					  GET_MODE (SUBREG_REG (in)),
					  SUBREG_BYTE (in),
					  GET_MODE (in));
      in = SUBREG_REG (in);
    }

  /* Narrow down the reg class, the same way push_reload will;
     otherwise we might find a dummy now, but push_reload won't.  */
  {
    reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
    if (preferred_class != NO_REGS)
      rclass = (enum reg_class) preferred_class;
  }

  /* See if OUT will do.  */
  if (REG_P (out)
      && REGNO (out) < FIRST_PSEUDO_REGISTER)
    {
      unsigned int regno = REGNO (out) + out_offset;
      unsigned int nwords = hard_regno_nregs[regno][outmode];
      rtx saved_rtx;

      /* When we consider whether the insn uses OUT,
	 ignore references within IN.  They don't prevent us
	 from copying IN into OUT, because those refs would
	 move into the insn that reloads IN.

	 However, we only ignore IN in its role as this reload.
	 If the insn uses IN elsewhere and it contains OUT,
	 that counts.  We can't be sure it's the "same" operand
	 so it might not go through this reload.

	 We also need to avoid using OUT if it, or part of it, is a
	 fixed register.  Modifying such registers, even transiently,
	 may have undefined effects on the machine, such as modifying
	 the stack pointer.  */
      saved_rtx = *inloc;
      *inloc = const0_rtx;

      if (regno < FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_MODE_OK (regno, outmode)
	  && ! refers_to_regno_for_reload_p (regno, regno + nwords,
					     PATTERN (this_insn), outloc))
	{
	  unsigned int i;

	  /* Every hard register of the group must be in RCLASS and
	     must not be fixed.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i)
		|| fixed_regs[regno + i])
	      break;

	  if (i == nwords)
	    {
	      if (REG_P (real_out))
		value = real_out;
	      else
		value = gen_rtx_REG (outmode, regno);
	    }
	}

      /* Restore the operand location we temporarily clobbered above.  */
      *inloc = saved_rtx;
    }

  /* Consider using IN if OUT was not acceptable
     or if OUT dies in this insn (like the quotient in a divmod insn).
     We can't use IN unless it dies in this insn,
     which means we must know accurately which hard regs are live.
     Also, the result can't go in IN if IN is used within OUT,
     or if OUT is an earlyclobber and IN appears elsewhere in the insn.  */
  if (hard_regs_live_known
      && REG_P (in)
      && REGNO (in) < FIRST_PSEUDO_REGISTER
      && (value == 0
	  || find_reg_note (this_insn, REG_UNUSED, real_out))
      && find_reg_note (this_insn, REG_DEAD, real_in)
      && !fixed_regs[REGNO (in)]
      && HARD_REGNO_MODE_OK (REGNO (in),
			     /* The only case where out and real_out might
				have different modes is where real_out
				is a subreg, and in that case, out
				has a real mode.  */
			     (GET_MODE (out) != VOIDmode
			      ? GET_MODE (out) : outmode))
      && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
	  /* However only do this if we can be sure that this input
	     operand doesn't correspond with an uninitialized pseudo.
	     global can assign some hardreg to it that is the same as
	     the one assigned to a different, also live pseudo (as it
	     can ignore the conflict).  We must never introduce writes
	     to such hardregs, as they would clobber the other live
	     pseudo.  See PR 20973.  */
	  || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
			     ORIGINAL_REGNO (in))
	      /* Similarly, only do this if we can be sure that the death
		 note is still valid.  global can assign some hardreg to
		 the pseudo referenced in the note and simultaneously a
		 subword of this hardreg to a different, also live pseudo,
		 because only another subword of the hardreg is actually
		 used in the insn.  This cannot happen if the pseudo has
		 been assigned exactly one hardreg.  See PR 33732.  */
	      && hard_regno_nregs[REGNO (in)][GET_MODE (in)] == 1)))
    {
      unsigned int regno = REGNO (in) + in_offset;
      unsigned int nwords = hard_regno_nregs[regno][inmode];

      if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
	  && ! hard_reg_set_here_p (regno, regno + nwords,
				    PATTERN (this_insn))
	  && (! earlyclobber
	      || ! refers_to_regno_for_reload_p (regno, regno + nwords,
						 PATTERN (this_insn), inloc)))
	{
	  unsigned int i;

	  /* Every hard register of the group must be in RCLASS.  */
	  for (i = 0; i < nwords; i++)
	    if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				     regno + i))
	      break;

	  if (i == nwords)
	    {
	      /* If we were going to use OUT as the reload reg
		 and changed our mind, it means OUT is a dummy that
		 dies here.  So don't bother copying value to it.  */
	      if (for_real >= 0 && value == real_out)
		rld[for_real].out = 0;
	      if (REG_P (real_in))
		value = real_in;
	      else
		value = gen_rtx_REG (inmode, regno);
	    }
	}
    }

  return value;
}
2148 | \f | |
2149 | /* This page contains subroutines used mainly for determining | |
2150 | whether the IN or an OUT of a reload can serve as the | |
2151 | reload register. */ | |
2152 | ||
4644aad4 RK |
2153 | /* Return 1 if X is an operand of an insn that is being earlyclobbered. */ |
2154 | ||
09a308fe | 2155 | int |
0c20a65f | 2156 | earlyclobber_operand_p (rtx x) |
4644aad4 RK |
2157 | { |
2158 | int i; | |
2159 | ||
2160 | for (i = 0; i < n_earlyclobbers; i++) | |
2161 | if (reload_earlyclobbers[i] == x) | |
2162 | return 1; | |
2163 | ||
2164 | return 0; | |
2165 | } | |
2166 | ||
eab89b90 RK |
2167 | /* Return 1 if expression X alters a hard reg in the range |
2168 | from BEG_REGNO (inclusive) to END_REGNO (exclusive), | |
2169 | either explicitly or in the guise of a pseudo-reg allocated to REGNO. | |
2170 | X should be the body of an instruction. */ | |
2171 | ||
2172 | static int | |
0c20a65f | 2173 | hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x) |
eab89b90 RK |
2174 | { |
2175 | if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) | |
2176 | { | |
b3694847 | 2177 | rtx op0 = SET_DEST (x); |
770ae6cc | 2178 | |
eab89b90 RK |
2179 | while (GET_CODE (op0) == SUBREG) |
2180 | op0 = SUBREG_REG (op0); | |
f8cfc6aa | 2181 | if (REG_P (op0)) |
eab89b90 | 2182 | { |
770ae6cc RK |
2183 | unsigned int r = REGNO (op0); |
2184 | ||
eab89b90 RK |
2185 | /* See if this reg overlaps range under consideration. */ |
2186 | if (r < end_regno | |
09e18274 | 2187 | && end_hard_regno (GET_MODE (op0), r) > beg_regno) |
eab89b90 RK |
2188 | return 1; |
2189 | } | |
2190 | } | |
2191 | else if (GET_CODE (x) == PARALLEL) | |
2192 | { | |
b3694847 | 2193 | int i = XVECLEN (x, 0) - 1; |
770ae6cc | 2194 | |
eab89b90 RK |
2195 | for (; i >= 0; i--) |
2196 | if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i))) | |
2197 | return 1; | |
2198 | } | |
2199 | ||
2200 | return 0; | |
2201 | } | |
2202 | ||
09e881c9 BE |
/* Return 1 if ADDR is a valid memory address for mode MODE
   in address space AS, and check that each pseudo reg has the
   proper kind of hard reg.  This is the "strict" variant: pseudos
   must already have been assigned suitable hard registers.  */

int
strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
				    rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Targets still using the old GO_IF_LEGITIMATE_ADDRESS macro only
     support the generic address space.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  /* Third argument 1 requests strict checking from the hook.  */
  return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
#endif
}
eab89b90 RK |
2222 | \f |
/* Like rtx_equal_p except that it allows a REG and a SUBREG to match
   if they are the same hard reg, and has special hacks for
   autoincrement and autodecrement.
   This is specifically intended for find_reloads to use
   in determining whether two operands match.
   X is the operand whose number is the lower of the two.

   The value is 2 if Y contains a pre-increment that matches
   a non-incrementing address in X.  */

/* ??? To be completely correct, we should arrange to pass
   for X the output operand and for Y the input operand.
   For now, we assume that the output operand has the lower number
   because that is natural in (SET output (... input ...)).  */

int
operands_match_p (rtx x, rtx y)
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  int success_2;

  /* Identical rtx trivially match.  */
  if (x == y)
    return 1;
  /* Special case: both operands are (possibly subregs of) registers.
     Compare the hard register numbers they resolve to.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int j;

      if (code == SUBREG)
	{
	  i = REGNO (SUBREG_REG (x));
	  /* A subreg of a pseudo can't be resolved to a hard regno;
	     fall back to the structural comparison.  */
	  if (i >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x),
				    GET_MODE (x));
	}
      else
	i = REGNO (x);

      if (GET_CODE (y) == SUBREG)
	{
	  j = REGNO (SUBREG_REG (y));
	  if (j >= FIRST_PSEUDO_REGISTER)
	    goto slow;
	  j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
				    GET_MODE (SUBREG_REG (y)),
				    SUBREG_BYTE (y),
				    GET_MODE (y));
	}
      else
	j = REGNO (y);

      /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
	 multiple hard register group of scalar integer registers, so that
	 for example (reg:DI 0) and (reg:SI 1) will be considered the same
	 register.  */
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (x))
	  && i < FIRST_PSEUDO_REGISTER)
	i += hard_regno_nregs[i][GET_MODE (x)] - 1;
      if (REG_WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (y)) > UNITS_PER_WORD
	  && SCALAR_INT_MODE_P (GET_MODE (y))
	  && j < FIRST_PSEUDO_REGISTER)
	j += hard_regno_nregs[j][GET_MODE (y)] - 1;

      return i == j;
    }
  /* If two operands must match, because they are really a single
     operand of an assembler insn, then two postincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a postincrement matches ordinary indexing
     if the postincrement is the output operand.  */
  if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
    return operands_match_p (XEXP (x, 0), y);
  /* Two preincrements are invalid
     because the assembler insn would increment only once.
     On the other hand, a preincrement matches ordinary indexing
     if the preincrement is the input operand.
     In this case, return 2, since some callers need to do special
     things when this happens.  */
  if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
      || GET_CODE (y) == PRE_MODIFY)
    return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;

 slow:

  /* Now we have disposed of all the cases in which different rtx codes
     can match.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* MEMs referring to different address space are not equivalent.  */
  if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
    return 0;

  switch (code)
    {
    CASE_CONST_UNIQUE:
      /* Unique constants were already handled by the X == Y test.  */
      return 0;

    case LABEL_REF:
      return LABEL_REF_LABEL (x) == LABEL_REF_LABEL (y);
    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  success_2 = 0;
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int val, j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'e':
	  val = operands_match_p (XEXP (x, i), XEXP (y, i));
	  if (val == 0)
	    return 0;
	  /* If any subexpression returns 2,
	     we should return 2 if we are successful.  */
	  if (val == 2)
	    success_2 = 1;
	  break;

	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; --j)
	    {
	      val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
	      if (val == 0)
		return 0;
	      if (val == 2)
		success_2 = 1;
	    }
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1 + success_2;
}
2396 | \f | |
eab89b90 | 2397 | /* Describe the range of registers or memory referenced by X. |
05d10675 | 2398 | If X is a register, set REG_FLAG and put the first register |
eab89b90 | 2399 | number into START and the last plus one into END. |
05d10675 | 2400 | If X is a memory reference, put a base address into BASE |
eab89b90 | 2401 | and a range of integer offsets into START and END. |
05d10675 | 2402 | If X is pushing on the stack, we can assume it causes no trouble, |
eab89b90 RK |
2403 | so we set the SAFE field. */ |
2404 | ||
2405 | static struct decomposition | |
0c20a65f | 2406 | decompose (rtx x) |
eab89b90 RK |
2407 | { |
2408 | struct decomposition val; | |
2409 | int all_const = 0; | |
2410 | ||
6de9cd9a DN |
2411 | memset (&val, 0, sizeof (val)); |
2412 | ||
41374e13 | 2413 | switch (GET_CODE (x)) |
eab89b90 | 2414 | { |
41374e13 NS |
2415 | case MEM: |
2416 | { | |
2417 | rtx base = NULL_RTX, offset = 0; | |
2418 | rtx addr = XEXP (x, 0); | |
b8698a0f | 2419 | |
41374e13 NS |
2420 | if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC |
2421 | || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC) | |
2422 | { | |
2423 | val.base = XEXP (addr, 0); | |
2424 | val.start = -GET_MODE_SIZE (GET_MODE (x)); | |
2425 | val.end = GET_MODE_SIZE (GET_MODE (x)); | |
2426 | val.safe = REGNO (val.base) == STACK_POINTER_REGNUM; | |
2427 | return val; | |
2428 | } | |
b8698a0f | 2429 | |
41374e13 NS |
2430 | if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY) |
2431 | { | |
2432 | if (GET_CODE (XEXP (addr, 1)) == PLUS | |
2433 | && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0) | |
2434 | && CONSTANT_P (XEXP (XEXP (addr, 1), 1))) | |
2435 | { | |
2436 | val.base = XEXP (addr, 0); | |
2437 | val.start = -INTVAL (XEXP (XEXP (addr, 1), 1)); | |
2438 | val.end = INTVAL (XEXP (XEXP (addr, 1), 1)); | |
2439 | val.safe = REGNO (val.base) == STACK_POINTER_REGNUM; | |
2440 | return val; | |
2441 | } | |
2442 | } | |
b8698a0f | 2443 | |
41374e13 NS |
2444 | if (GET_CODE (addr) == CONST) |
2445 | { | |
2446 | addr = XEXP (addr, 0); | |
2447 | all_const = 1; | |
2448 | } | |
2449 | if (GET_CODE (addr) == PLUS) | |
2450 | { | |
2451 | if (CONSTANT_P (XEXP (addr, 0))) | |
2452 | { | |
2453 | base = XEXP (addr, 1); | |
2454 | offset = XEXP (addr, 0); | |
2455 | } | |
2456 | else if (CONSTANT_P (XEXP (addr, 1))) | |
2457 | { | |
2458 | base = XEXP (addr, 0); | |
2459 | offset = XEXP (addr, 1); | |
2460 | } | |
2461 | } | |
b8698a0f | 2462 | |
41374e13 NS |
2463 | if (offset == 0) |
2464 | { | |
2465 | base = addr; | |
2466 | offset = const0_rtx; | |
2467 | } | |
2468 | if (GET_CODE (offset) == CONST) | |
2469 | offset = XEXP (offset, 0); | |
2470 | if (GET_CODE (offset) == PLUS) | |
2471 | { | |
481683e1 | 2472 | if (CONST_INT_P (XEXP (offset, 0))) |
41374e13 NS |
2473 | { |
2474 | base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1)); | |
2475 | offset = XEXP (offset, 0); | |
2476 | } | |
481683e1 | 2477 | else if (CONST_INT_P (XEXP (offset, 1))) |
41374e13 NS |
2478 | { |
2479 | base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0)); | |
2480 | offset = XEXP (offset, 1); | |
2481 | } | |
2482 | else | |
2483 | { | |
2484 | base = gen_rtx_PLUS (GET_MODE (base), base, offset); | |
2485 | offset = const0_rtx; | |
2486 | } | |
2487 | } | |
481683e1 | 2488 | else if (!CONST_INT_P (offset)) |
41374e13 NS |
2489 | { |
2490 | base = gen_rtx_PLUS (GET_MODE (base), base, offset); | |
2491 | offset = const0_rtx; | |
2492 | } | |
b8698a0f | 2493 | |
41374e13 NS |
2494 | if (all_const && GET_CODE (base) == PLUS) |
2495 | base = gen_rtx_CONST (GET_MODE (base), base); | |
b8698a0f | 2496 | |
481683e1 | 2497 | gcc_assert (CONST_INT_P (offset)); |
b8698a0f | 2498 | |
41374e13 NS |
2499 | val.start = INTVAL (offset); |
2500 | val.end = val.start + GET_MODE_SIZE (GET_MODE (x)); | |
2501 | val.base = base; | |
2502 | } | |
2503 | break; | |
b8698a0f | 2504 | |
41374e13 | 2505 | case REG: |
eab89b90 | 2506 | val.reg_flag = 1; |
05d10675 | 2507 | val.start = true_regnum (x); |
67468e8e | 2508 | if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER) |
eab89b90 RK |
2509 | { |
2510 | /* A pseudo with no hard reg. */ | |
2511 | val.start = REGNO (x); | |
2512 | val.end = val.start + 1; | |
2513 | } | |
2514 | else | |
2515 | /* A hard reg. */ | |
09e18274 | 2516 | val.end = end_hard_regno (GET_MODE (x), val.start); |
41374e13 NS |
2517 | break; |
2518 | ||
2519 | case SUBREG: | |
f8cfc6aa | 2520 | if (!REG_P (SUBREG_REG (x))) |
eab89b90 RK |
2521 | /* This could be more precise, but it's good enough. */ |
2522 | return decompose (SUBREG_REG (x)); | |
2523 | val.reg_flag = 1; | |
05d10675 | 2524 | val.start = true_regnum (x); |
67468e8e | 2525 | if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER) |
eab89b90 RK |
2526 | return decompose (SUBREG_REG (x)); |
2527 | else | |
2528 | /* A hard reg. */ | |
f1f4e530 | 2529 | val.end = val.start + subreg_nregs (x); |
41374e13 NS |
2530 | break; |
2531 | ||
2532 | case SCRATCH: | |
2533 | /* This hasn't been assigned yet, so it can't conflict yet. */ | |
2534 | val.safe = 1; | |
2535 | break; | |
2536 | ||
2537 | default: | |
2538 | gcc_assert (CONSTANT_P (x)); | |
2539 | val.safe = 1; | |
2540 | break; | |
eab89b90 | 2541 | } |
eab89b90 RK |
2542 | return val; |
2543 | } | |
2544 | ||
/* Return 1 if altering Y will not modify the value of X.
   Y is also described by YDATA, which should be decompose (Y).  */

static int
immune_p (rtx x, rtx y, struct decomposition ydata)
{
  struct decomposition xdata;

  /* If Y is a register range, X is immune iff it never refers to
     any register in that range.  */
  if (ydata.reg_flag)
    return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
  if (ydata.safe)
    return 1;

  gcc_assert (MEM_P (y));
  /* If Y is memory and X is not, Y can't affect X.  */
  if (!MEM_P (x))
    return 1;

  xdata = decompose (x);

  if (! rtx_equal_p (xdata.base, ydata.base))
    {
      /* If bases are distinct symbolic constants, there is no overlap.  */
      if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
	return 1;
      /* Constants and stack slots never overlap.  */
      if (CONSTANT_P (xdata.base)
	  && (ydata.base == frame_pointer_rtx
	      || ydata.base == hard_frame_pointer_rtx
	      || ydata.base == stack_pointer_rtx))
	return 1;
      if (CONSTANT_P (ydata.base)
	  && (xdata.base == frame_pointer_rtx
	      || xdata.base == hard_frame_pointer_rtx
	      || xdata.base == stack_pointer_rtx))
	return 1;
      /* If either base is variable, we don't know anything.  */
      return 0;
    }

  /* Same base: the accesses are disjoint iff their offset ranges
     do not overlap.  */
  return (xdata.start >= ydata.end || ydata.start >= xdata.end);
}
44ace968 | 2587 | |
f72aed24 | 2588 | /* Similar, but calls decompose. */ |
44ace968 JW |
2589 | |
2590 | int | |
0c20a65f | 2591 | safe_from_earlyclobber (rtx op, rtx clobber) |
44ace968 JW |
2592 | { |
2593 | struct decomposition early_data; | |
2594 | ||
2595 | early_data = decompose (clobber); | |
2596 | return immune_p (op, clobber, early_data); | |
2597 | } | |
eab89b90 RK |
2598 | \f |
2599 | /* Main entry point of this file: search the body of INSN | |
2600 | for values that need reloading and record them with push_reload. | |
2601 | REPLACE nonzero means record also where the values occur | |
2602 | so that subst_reloads can be used. | |
2603 | ||
2604 | IND_LEVELS says how many levels of indirection are supported by this | |
2605 | machine; a value of zero means that a memory reference is not a valid | |
2606 | memory address. | |
2607 | ||
2608 | LIVE_KNOWN says we have valid information about which hard | |
2609 | regs are live at each point in the program; this is true when | |
2610 | we are called from global_alloc but false when stupid register | |
2611 | allocation has been done. | |
2612 | ||
2613 | RELOAD_REG_P if nonzero is a vector indexed by hard reg number | |
2614 | which is nonnegative if the reg has been commandeered for reloading into. | |
2615 | It is copied into STATIC_RELOAD_REG_P and referenced from there | |
cb2afeb3 | 2616 | by various subroutines. |
eab89b90 | 2617 | |
cb2afeb3 R |
2618 | Return TRUE if some operands need to be changed, because of swapping |
2619 | commutative operands, reg_equiv_address substitution, or whatever. */ | |
2620 | ||
2621 | int | |
5d86f5f9 | 2622 | find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known, |
0c20a65f | 2623 | short *reload_reg_p) |
eab89b90 | 2624 | { |
b3694847 SS |
2625 | int insn_code_number; |
2626 | int i, j; | |
eab89b90 | 2627 | int noperands; |
eab89b90 RK |
2628 | /* These start out as the constraints for the insn |
2629 | and they are chewed up as we consider alternatives. */ | |
7ac28727 | 2630 | const char *constraints[MAX_RECOG_OPERANDS]; |
eab89b90 RK |
2631 | /* These are the preferred classes for an operand, or NO_REGS if it isn't |
2632 | a register. */ | |
2633 | enum reg_class preferred_class[MAX_RECOG_OPERANDS]; | |
2634 | char pref_or_nothing[MAX_RECOG_OPERANDS]; | |
b8698a0f | 2635 | /* Nonzero for a MEM operand whose entire address needs a reload. |
0b540f12 | 2636 | May be -1 to indicate the entire address may or may not need a reload. */ |
eab89b90 | 2637 | int address_reloaded[MAX_RECOG_OPERANDS]; |
0b540f12 UW |
2638 | /* Nonzero for an address operand that needs to be completely reloaded. |
2639 | May be -1 to indicate the entire operand may or may not need a reload. */ | |
9537511b | 2640 | int address_operand_reloaded[MAX_RECOG_OPERANDS]; |
a8c9daeb RK |
2641 | /* Value of enum reload_type to use for operand. */ |
2642 | enum reload_type operand_type[MAX_RECOG_OPERANDS]; | |
2643 | /* Value of enum reload_type to use within address of operand. */ | |
2644 | enum reload_type address_type[MAX_RECOG_OPERANDS]; | |
2645 | /* Save the usage of each operand. */ | |
2646 | enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS]; | |
eab89b90 RK |
2647 | int no_input_reloads = 0, no_output_reloads = 0; |
2648 | int n_alternatives; | |
abd26bfb | 2649 | reg_class_t this_alternative[MAX_RECOG_OPERANDS]; |
69add2a8 | 2650 | char this_alternative_match_win[MAX_RECOG_OPERANDS]; |
eab89b90 RK |
2651 | char this_alternative_win[MAX_RECOG_OPERANDS]; |
2652 | char this_alternative_offmemok[MAX_RECOG_OPERANDS]; | |
2653 | char this_alternative_earlyclobber[MAX_RECOG_OPERANDS]; | |
2654 | int this_alternative_matches[MAX_RECOG_OPERANDS]; | |
fba42e24 | 2655 | reg_class_t goal_alternative[MAX_RECOG_OPERANDS]; |
eab89b90 | 2656 | int this_alternative_number; |
a544cfd2 | 2657 | int goal_alternative_number = 0; |
eab89b90 RK |
2658 | int operand_reloadnum[MAX_RECOG_OPERANDS]; |
2659 | int goal_alternative_matches[MAX_RECOG_OPERANDS]; | |
2660 | int goal_alternative_matched[MAX_RECOG_OPERANDS]; | |
69add2a8 | 2661 | char goal_alternative_match_win[MAX_RECOG_OPERANDS]; |
eab89b90 RK |
2662 | char goal_alternative_win[MAX_RECOG_OPERANDS]; |
2663 | char goal_alternative_offmemok[MAX_RECOG_OPERANDS]; | |
2664 | char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS]; | |
2665 | int goal_alternative_swapped; | |
eab89b90 RK |
2666 | int best; |
2667 | int commutative; | |
2668 | char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS]; | |
2669 | rtx substed_operand[MAX_RECOG_OPERANDS]; | |
2670 | rtx body = PATTERN (insn); | |
2671 | rtx set = single_set (insn); | |
a544cfd2 | 2672 | int goal_earlyclobber = 0, this_earlyclobber; |
ef4bddc2 | 2673 | machine_mode operand_mode[MAX_RECOG_OPERANDS]; |
cb2afeb3 | 2674 | int retval = 0; |
eab89b90 RK |
2675 | |
2676 | this_insn = insn; | |
eab89b90 RK |
2677 | n_reloads = 0; |
2678 | n_replacements = 0; | |
eab89b90 RK |
2679 | n_earlyclobbers = 0; |
2680 | replace_reloads = replace; | |
2681 | hard_regs_live_known = live_known; | |
2682 | static_reload_reg_p = reload_reg_p; | |
2683 | ||
2684 | /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads; | |
2685 | neither are insns that SET cc0. Insns that use CC0 are not allowed | |
2686 | to have any input reloads. */ | |
4b4bf941 | 2687 | if (JUMP_P (insn) || CALL_P (insn)) |
eab89b90 RK |
2688 | no_output_reloads = 1; |
2689 | ||
2690 | #ifdef HAVE_cc0 | |
2691 | if (reg_referenced_p (cc0_rtx, PATTERN (insn))) | |
2692 | no_input_reloads = 1; | |
2693 | if (reg_set_p (cc0_rtx, PATTERN (insn))) | |
2694 | no_output_reloads = 1; | |
2695 | #endif | |
05d10675 | 2696 | |
0dadecf6 RK |
2697 | #ifdef SECONDARY_MEMORY_NEEDED |
2698 | /* The eliminated forms of any secondary memory locations are per-insn, so | |
2699 | clear them out here. */ | |
2700 | ||
048b0d2e JH |
2701 | if (secondary_memlocs_elim_used) |
2702 | { | |
2703 | memset (secondary_memlocs_elim, 0, | |
2704 | sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used); | |
2705 | secondary_memlocs_elim_used = 0; | |
2706 | } | |
0dadecf6 RK |
2707 | #endif |
2708 | ||
0a578fee BS |
2709 | /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it |
2710 | is cheap to move between them. If it is not, there may not be an insn | |
2711 | to do the copy, so we may need a reload. */ | |
2712 | if (GET_CODE (body) == SET | |
f8cfc6aa | 2713 | && REG_P (SET_DEST (body)) |
0a578fee | 2714 | && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER |
f8cfc6aa | 2715 | && REG_P (SET_SRC (body)) |
0a578fee | 2716 | && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER |
de8f4b07 | 2717 | && register_move_cost (GET_MODE (SET_SRC (body)), |
e56b4594 | 2718 | REGNO_REG_CLASS (REGNO (SET_SRC (body))), |
0a578fee BS |
2719 | REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2) |
2720 | return 0; | |
eab89b90 | 2721 | |
0a578fee | 2722 | extract_insn (insn); |
eab89b90 | 2723 | |
1ccbefce RH |
2724 | noperands = reload_n_operands = recog_data.n_operands; |
2725 | n_alternatives = recog_data.n_alternatives; | |
eab89b90 | 2726 | |
0a578fee BS |
2727 | /* Just return "no reloads" if insn has no operands with constraints. */ |
2728 | if (noperands == 0 || n_alternatives == 0) | |
2729 | return 0; | |
eab89b90 | 2730 | |
0a578fee BS |
2731 | insn_code_number = INSN_CODE (insn); |
2732 | this_insn_is_asm = insn_code_number < 0; | |
eab89b90 | 2733 | |
f428f252 | 2734 | memcpy (operand_mode, recog_data.operand_mode, |
ef4bddc2 | 2735 | noperands * sizeof (machine_mode)); |
7ac28727 AK |
2736 | memcpy (constraints, recog_data.constraints, |
2737 | noperands * sizeof (const char *)); | |
eab89b90 RK |
2738 | |
2739 | commutative = -1; | |
2740 | ||
2741 | /* If we will need to know, later, whether some pair of operands | |
2742 | are the same, we must compare them now and save the result. | |
2743 | Reloading the base and index registers will clobber them | |
2744 | and afterward they will fail to match. */ | |
2745 | ||
2746 | for (i = 0; i < noperands; i++) | |
2747 | { | |
7ac28727 | 2748 | const char *p; |
b3694847 | 2749 | int c; |
7ac28727 | 2750 | char *end; |
eab89b90 | 2751 | |
1ccbefce | 2752 | substed_operand[i] = recog_data.operand[i]; |
eab89b90 RK |
2753 | p = constraints[i]; |
2754 | ||
a8c9daeb RK |
2755 | modified[i] = RELOAD_READ; |
2756 | ||
05d10675 | 2757 | /* Scan this operand's constraint to see if it is an output operand, |
a8c9daeb | 2758 | an in-out operand, is commutative, or should match another. */ |
eab89b90 | 2759 | |
97488870 | 2760 | while ((c = *p)) |
a8c9daeb | 2761 | { |
97488870 | 2762 | p += CONSTRAINT_LEN (c, p); |
f345f21a | 2763 | switch (c) |
a8c9daeb | 2764 | { |
f345f21a JH |
2765 | case '=': |
2766 | modified[i] = RELOAD_WRITE; | |
2767 | break; | |
2768 | case '+': | |
2769 | modified[i] = RELOAD_READ_WRITE; | |
2770 | break; | |
2771 | case '%': | |
2772 | { | |
2773 | /* The last operand should not be marked commutative. */ | |
41374e13 | 2774 | gcc_assert (i != noperands - 1); |
f345f21a JH |
2775 | |
2776 | /* We currently only support one commutative pair of | |
2777 | operands. Some existing asm code currently uses more | |
2778 | than one pair. Previously, that would usually work, | |
2779 | but sometimes it would crash the compiler. We | |
2780 | continue supporting that case as well as we can by | |
2781 | silently ignoring all but the first pair. In the | |
2782 | future we may handle it correctly. */ | |
2783 | if (commutative < 0) | |
2784 | commutative = i; | |
41374e13 NS |
2785 | else |
2786 | gcc_assert (this_insn_is_asm); | |
f345f21a JH |
2787 | } |
2788 | break; | |
2789 | /* Use of ISDIGIT is tempting here, but it may get expensive because | |
2790 | of locale support we don't want. */ | |
2791 | case '0': case '1': case '2': case '3': case '4': | |
2792 | case '5': case '6': case '7': case '8': case '9': | |
2793 | { | |
7ac28727 AK |
2794 | c = strtoul (p - 1, &end, 10); |
2795 | p = end; | |
f345f21a JH |
2796 | |
2797 | operands_match[c][i] | |
2798 | = operands_match_p (recog_data.operand[c], | |
2799 | recog_data.operand[i]); | |
2800 | ||
2801 | /* An operand may not match itself. */ | |
41374e13 | 2802 | gcc_assert (c != i); |
f345f21a JH |
2803 | |
2804 | /* If C can be commuted with C+1, and C might need to match I, | |
2805 | then C+1 might also need to match I. */ | |
2806 | if (commutative >= 0) | |
2807 | { | |
2808 | if (c == commutative || c == commutative + 1) | |
2809 | { | |
2810 | int other = c + (c == commutative ? 1 : -1); | |
2811 | operands_match[other][i] | |
2812 | = operands_match_p (recog_data.operand[other], | |
2813 | recog_data.operand[i]); | |
2814 | } | |
2815 | if (i == commutative || i == commutative + 1) | |
2816 | { | |
2817 | int other = i + (i == commutative ? 1 : -1); | |
2818 | operands_match[c][other] | |
2819 | = operands_match_p (recog_data.operand[c], | |
2820 | recog_data.operand[other]); | |
2821 | } | |
2822 | /* Note that C is supposed to be less than I. | |
2823 | No need to consider altering both C and I because in | |
2824 | that case we would alter one into the other. */ | |
2825 | } | |
2826 | } | |
a8c9daeb RK |
2827 | } |
2828 | } | |
eab89b90 RK |
2829 | } |
2830 | ||
2831 | /* Examine each operand that is a memory reference or memory address | |
2832 | and reload parts of the addresses into index registers. | |
eab89b90 RK |
2833 | Also here any references to pseudo regs that didn't get hard regs |
2834 | but are equivalent to constants get replaced in the insn itself | |
05d10675 | 2835 | with those constants. Nobody will ever see them again. |
eab89b90 RK |
2836 | |
2837 | Finally, set up the preferred classes of each operand. */ | |
2838 | ||
2839 | for (i = 0; i < noperands; i++) | |
2840 | { | |
b3694847 | 2841 | RTX_CODE code = GET_CODE (recog_data.operand[i]); |
a8c9daeb | 2842 | |
eab89b90 | 2843 | address_reloaded[i] = 0; |
9537511b | 2844 | address_operand_reloaded[i] = 0; |
a8c9daeb RK |
2845 | operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT |
2846 | : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT | |
2847 | : RELOAD_OTHER); | |
2848 | address_type[i] | |
2849 | = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS | |
2850 | : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS | |
2851 | : RELOAD_OTHER); | |
eab89b90 | 2852 | |
0d38001f RS |
2853 | if (*constraints[i] == 0) |
2854 | /* Ignore things like match_operator operands. */ | |
2855 | ; | |
8677664e RS |
2856 | else if (insn_extra_address_constraint |
2857 | (lookup_constraint (constraints[i]))) | |
eab89b90 | 2858 | { |
9537511b UW |
2859 | address_operand_reloaded[i] |
2860 | = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0, | |
2861 | recog_data.operand[i], | |
2862 | recog_data.operand_loc[i], | |
2863 | i, operand_type[i], ind_levels, insn); | |
b685dbae | 2864 | |
05d10675 | 2865 | /* If we now have a simple operand where we used to have a |
b685dbae | 2866 | PLUS or MULT, re-recognize and try again. */ |
ec8e098d | 2867 | if ((OBJECT_P (*recog_data.operand_loc[i]) |
1ccbefce RH |
2868 | || GET_CODE (*recog_data.operand_loc[i]) == SUBREG) |
2869 | && (GET_CODE (recog_data.operand[i]) == MULT | |
2870 | || GET_CODE (recog_data.operand[i]) == PLUS)) | |
b685dbae RK |
2871 | { |
2872 | INSN_CODE (insn) = -1; | |
cb2afeb3 R |
2873 | retval = find_reloads (insn, replace, ind_levels, live_known, |
2874 | reload_reg_p); | |
2875 | return retval; | |
b685dbae RK |
2876 | } |
2877 | ||
1ccbefce RH |
2878 | recog_data.operand[i] = *recog_data.operand_loc[i]; |
2879 | substed_operand[i] = recog_data.operand[i]; | |
9537511b UW |
2880 | |
2881 | /* Address operands are reloaded in their existing mode, | |
2882 | no matter what is specified in the machine description. */ | |
2883 | operand_mode[i] = GET_MODE (recog_data.operand[i]); | |
19077123 AK |
2884 | |
2885 | /* If the address is a single CONST_INT pick address mode | |
2886 | instead otherwise we will later not know in which mode | |
2887 | the reload should be performed. */ | |
2888 | if (operand_mode[i] == VOIDmode) | |
2889 | operand_mode[i] = Pmode; | |
2890 | ||
eab89b90 RK |
2891 | } |
2892 | else if (code == MEM) | |
2893 | { | |
ab87f8c8 | 2894 | address_reloaded[i] |
1ccbefce RH |
2895 | = find_reloads_address (GET_MODE (recog_data.operand[i]), |
2896 | recog_data.operand_loc[i], | |
2897 | XEXP (recog_data.operand[i], 0), | |
2898 | &XEXP (recog_data.operand[i], 0), | |
ab87f8c8 | 2899 | i, address_type[i], ind_levels, insn); |
1ccbefce RH |
2900 | recog_data.operand[i] = *recog_data.operand_loc[i]; |
2901 | substed_operand[i] = recog_data.operand[i]; | |
eab89b90 RK |
2902 | } |
2903 | else if (code == SUBREG) | |
b60a8416 | 2904 | { |
1ccbefce | 2905 | rtx reg = SUBREG_REG (recog_data.operand[i]); |
b60a8416 | 2906 | rtx op |
1ccbefce | 2907 | = find_reloads_toplev (recog_data.operand[i], i, address_type[i], |
b60a8416 R |
2908 | ind_levels, |
2909 | set != 0 | |
1ccbefce | 2910 | && &SET_DEST (set) == recog_data.operand_loc[i], |
9246aadb AH |
2911 | insn, |
2912 | &address_reloaded[i]); | |
b60a8416 R |
2913 | |
2914 | /* If we made a MEM to load (a part of) the stackslot of a pseudo | |
2915 | that didn't get a hard register, emit a USE with a REG_EQUAL | |
2916 | note in front so that we might inherit a previous, possibly | |
2917 | wider reload. */ | |
05d10675 | 2918 | |
cb2afeb3 | 2919 | if (replace |
3c0cb5de | 2920 | && MEM_P (op) |
f8cfc6aa | 2921 | && REG_P (reg) |
b60a8416 | 2922 | && (GET_MODE_SIZE (GET_MODE (reg)) |
c07fdd94 | 2923 | >= GET_MODE_SIZE (GET_MODE (op))) |
f2034d06 | 2924 | && reg_equiv_constant (REGNO (reg)) == 0) |
3d238248 JJ |
2925 | set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg), |
2926 | insn), | |
f2034d06 | 2927 | REG_EQUAL, reg_equiv_memory_loc (REGNO (reg))); |
b60a8416 | 2928 | |
1ccbefce | 2929 | substed_operand[i] = recog_data.operand[i] = op; |
b60a8416 | 2930 | } |
ec8e098d | 2931 | else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY) |
ff428c90 ILT |
2932 | /* We can get a PLUS as an "operand" as a result of register |
2933 | elimination. See eliminate_regs and gen_reload. We handle | |
2934 | a unary operator by reloading the operand. */ | |
1ccbefce RH |
2935 | substed_operand[i] = recog_data.operand[i] |
2936 | = find_reloads_toplev (recog_data.operand[i], i, address_type[i], | |
9246aadb AH |
2937 | ind_levels, 0, insn, |
2938 | &address_reloaded[i]); | |
eab89b90 RK |
2939 | else if (code == REG) |
2940 | { | |
2941 | /* This is equivalent to calling find_reloads_toplev. | |
2942 | The code is duplicated for speed. | |
2943 | When we find a pseudo always equivalent to a constant, | |
2944 | we replace it by the constant. We must be sure, however, | |
2945 | that we don't try to replace it in the insn in which it | |
6d2f8887 | 2946 | is being set. */ |
b3694847 | 2947 | int regno = REGNO (recog_data.operand[i]); |
f2034d06 | 2948 | if (reg_equiv_constant (regno) != 0 |
1ccbefce | 2949 | && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i])) |
726e2d54 JW |
2950 | { |
2951 | /* Record the existing mode so that the check if constants are | |
eceef4c9 | 2952 | allowed will work when operand_mode isn't specified. */ |
726e2d54 JW |
2953 | |
2954 | if (operand_mode[i] == VOIDmode) | |
1ccbefce | 2955 | operand_mode[i] = GET_MODE (recog_data.operand[i]); |
726e2d54 | 2956 | |
1ccbefce | 2957 | substed_operand[i] = recog_data.operand[i] |
f2034d06 | 2958 | = reg_equiv_constant (regno); |
726e2d54 | 2959 | } |
f2034d06 JL |
2960 | if (reg_equiv_memory_loc (regno) != 0 |
2961 | && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) | |
cb2afeb3 R |
2962 | /* We need not give a valid is_set_dest argument since the case |
2963 | of a constant equivalence was checked above. */ | |
1ccbefce RH |
2964 | substed_operand[i] = recog_data.operand[i] |
2965 | = find_reloads_toplev (recog_data.operand[i], i, address_type[i], | |
9246aadb AH |
2966 | ind_levels, 0, insn, |
2967 | &address_reloaded[i]); | |
eab89b90 | 2968 | } |
aaf9712e RS |
2969 | /* If the operand is still a register (we didn't replace it with an |
2970 | equivalent), get the preferred class to reload it into. */ | |
1ccbefce | 2971 | code = GET_CODE (recog_data.operand[i]); |
aaf9712e | 2972 | preferred_class[i] |
1ccbefce RH |
2973 | = ((code == REG && REGNO (recog_data.operand[i]) |
2974 | >= FIRST_PSEUDO_REGISTER) | |
2975 | ? reg_preferred_class (REGNO (recog_data.operand[i])) | |
2976 | : NO_REGS); | |
aaf9712e | 2977 | pref_or_nothing[i] |
1ccbefce RH |
2978 | = (code == REG |
2979 | && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER | |
2980 | && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS); | |
eab89b90 RK |
2981 | } |
2982 | ||
2983 | /* If this is simply a copy from operand 1 to operand 0, merge the | |
2984 | preferred classes for the operands. */ | |
1ccbefce RH |
2985 | if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set) |
2986 | && recog_data.operand[1] == SET_SRC (set)) | |
eab89b90 RK |
2987 | { |
2988 | preferred_class[0] = preferred_class[1] | |
2989 | = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]]; | |
2990 | pref_or_nothing[0] |= pref_or_nothing[1]; | |
2991 | pref_or_nothing[1] |= pref_or_nothing[0]; | |
2992 | } | |
2993 | ||
2994 | /* Now see what we need for pseudo-regs that didn't get hard regs | |
2995 | or got the wrong kind of hard reg. For this, we must consider | |
2996 | all the operands together against the register constraints. */ | |
2997 | ||
812f2051 | 2998 | best = MAX_RECOG_OPERANDS * 2 + 600; |
eab89b90 | 2999 | |
eab89b90 | 3000 | goal_alternative_swapped = 0; |
eab89b90 RK |
3001 | |
3002 | /* The constraints are made of several alternatives. | |
3003 | Each operand's constraint looks like foo,bar,... with commas | |
3004 | separating the alternatives. The first alternatives for all | |
3005 | operands go together, the second alternatives go together, etc. | |
3006 | ||
3007 | First loop over alternatives. */ | |
3008 | ||
795693c1 | 3009 | alternative_mask enabled = get_enabled_alternatives (insn); |
eab89b90 RK |
3010 | for (this_alternative_number = 0; |
3011 | this_alternative_number < n_alternatives; | |
3012 | this_alternative_number++) | |
3013 | { | |
b5068425 | 3014 | int swapped; |
eab89b90 | 3015 | |
795693c1 | 3016 | if (!TEST_BIT (enabled, this_alternative_number)) |
7ac28727 AK |
3017 | { |
3018 | int i; | |
3019 | ||
3020 | for (i = 0; i < recog_data.n_operands; i++) | |
3021 | constraints[i] = skip_alternative (constraints[i]); | |
3022 | ||
3023 | continue; | |
3024 | } | |
3025 | ||
b5068425 AK |
3026 | /* If insn is commutative (it's safe to exchange a certain pair |
3027 | of operands) then we need to try each alternative twice, the | |
3028 | second time matching those two operands as if we had | |
3029 | exchanged them. To do this, really exchange them in | |
3030 | operands. */ | |
3031 | for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++) | |
eab89b90 | 3032 | { |
b5068425 AK |
3033 | /* Loop over operands for one constraint alternative. */ |
3034 | /* LOSERS counts those that don't fit this alternative | |
3035 | and would require loading. */ | |
3036 | int losers = 0; | |
3037 | /* BAD is set to 1 if it some operand can't fit this alternative | |
3038 | even after reloading. */ | |
3039 | int bad = 0; | |
3040 | /* REJECT is a count of how undesirable this alternative says it is | |
3041 | if any reloading is required. If the alternative matches exactly | |
3042 | then REJECT is ignored, but otherwise it gets this much | |
3043 | counted against it in addition to the reloading needed. Each | |
3044 | ? counts three times here since we want the disparaging caused by | |
3045 | a bad register class to only count 1/3 as much. */ | |
3046 | int reject = 0; | |
3047 | ||
3048 | if (swapped) | |
eab89b90 | 3049 | { |
b5068425 AK |
3050 | enum reg_class tclass; |
3051 | int t; | |
3052 | ||
3053 | recog_data.operand[commutative] = substed_operand[commutative + 1]; | |
3054 | recog_data.operand[commutative + 1] = substed_operand[commutative]; | |
3055 | /* Swap the duplicates too. */ | |
3056 | for (i = 0; i < recog_data.n_dups; i++) | |
3057 | if (recog_data.dup_num[i] == commutative | |
3058 | || recog_data.dup_num[i] == commutative + 1) | |
3059 | *recog_data.dup_loc[i] | |
3060 | = recog_data.operand[(int) recog_data.dup_num[i]]; | |
3061 | ||
3062 | tclass = preferred_class[commutative]; | |
3063 | preferred_class[commutative] = preferred_class[commutative + 1]; | |
3064 | preferred_class[commutative + 1] = tclass; | |
3065 | ||
3066 | t = pref_or_nothing[commutative]; | |
3067 | pref_or_nothing[commutative] = pref_or_nothing[commutative + 1]; | |
3068 | pref_or_nothing[commutative + 1] = t; | |
3069 | ||
3070 | t = address_reloaded[commutative]; | |
3071 | address_reloaded[commutative] = address_reloaded[commutative + 1]; | |
3072 | address_reloaded[commutative + 1] = t; | |
3073 | } | |
3074 | ||
3075 | this_earlyclobber = 0; | |
3076 | ||
3077 | for (i = 0; i < noperands; i++) | |
3078 | { | |
3079 | const char *p = constraints[i]; | |
3080 | char *end; | |
3081 | int len; | |
3082 | int win = 0; | |
3083 | int did_match = 0; | |
3084 | /* 0 => this operand can be reloaded somehow for this alternative. */ | |
3085 | int badop = 1; | |
3086 | /* 0 => this operand can be reloaded if the alternative allows regs. */ | |
3087 | int winreg = 0; | |
3088 | int c; | |
3089 | int m; | |
3090 | rtx operand = recog_data.operand[i]; | |
3091 | int offset = 0; | |
3092 | /* Nonzero means this is a MEM that must be reloaded into a reg | |
3093 | regardless of what the constraint says. */ | |
3094 | int force_reload = 0; | |
3095 | int offmemok = 0; | |
3096 | /* Nonzero if a constant forced into memory would be OK for this | |
3097 | operand. */ | |
3098 | int constmemok = 0; | |
3099 | int earlyclobber = 0; | |
777e635f RS |
3100 | enum constraint_num cn; |
3101 | enum reg_class cl; | |
b5068425 AK |
3102 | |
3103 | /* If the predicate accepts a unary operator, it means that | |
3104 | we need to reload the operand, but do not do this for | |
3105 | match_operator and friends. */ | |
3106 | if (UNARY_P (operand) && *p != 0) | |
3107 | operand = XEXP (operand, 0); | |
3108 | ||
3109 | /* If the operand is a SUBREG, extract | |
3110 | the REG or MEM (or maybe even a constant) within. | |
3111 | (Constants can occur as a result of reg_equiv_constant.) */ | |
3112 | ||
3113 | while (GET_CODE (operand) == SUBREG) | |
ddef6bc7 | 3114 | { |
b5068425 AK |
3115 | /* Offset only matters when operand is a REG and |
3116 | it is a hard reg. This is because it is passed | |
3117 | to reg_fits_class_p if it is a REG and all pseudos | |
3118 | return 0 from that function. */ | |
3119 | if (REG_P (SUBREG_REG (operand)) | |
3120 | && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER) | |
3121 | { | |
3122 | if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)), | |
ddef6bc7 JJ |
3123 | GET_MODE (SUBREG_REG (operand)), |
3124 | SUBREG_BYTE (operand), | |
b5068425 AK |
3125 | GET_MODE (operand)) < 0) |
3126 | force_reload = 1; | |
3127 | offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)), | |
3128 | GET_MODE (SUBREG_REG (operand)), | |
3129 | SUBREG_BYTE (operand), | |
3130 | GET_MODE (operand)); | |
3131 | } | |
3132 | operand = SUBREG_REG (operand); | |
3133 | /* Force reload if this is a constant or PLUS or if there may | |
3134 | be a problem accessing OPERAND in the outer mode. */ | |
3135 | if (CONSTANT_P (operand) | |
3136 | || GET_CODE (operand) == PLUS | |
3137 | /* We must force a reload of paradoxical SUBREGs | |
3138 | of a MEM because the alignment of the inner value | |
3139 | may not be enough to do the outer reference. On | |
3140 | big-endian machines, it may also reference outside | |
3141 | the object. | |
3142 | ||
3143 | On machines that extend byte operations and we have a | |
3144 | SUBREG where both the inner and outer modes are no wider | |
3145 | than a word and the inner mode is narrower, is integral, | |
3146 | and gets extended when loaded from memory, combine.c has | |
3147 | made assumptions about the behavior of the machine in such | |
3148 | register access. If the data is, in fact, in memory we | |
3149 | must always load using the size assumed to be in the | |
3150 | register and let the insn do the different-sized | |
3151 | accesses. | |
3152 | ||
3153 | This is doubly true if WORD_REGISTER_OPERATIONS. In | |
3154 | this case eliminate_regs has left non-paradoxical | |
3155 | subregs for push_reload to see. Make sure it does | |
3156 | by forcing the reload. | |
3157 | ||
3158 | ??? When is it right at this stage to have a subreg | |
3159 | of a mem that is _not_ to be handled specially? IMO | |
3160 | those should have been reduced to just a mem. */ | |
3161 | || ((MEM_P (operand) | |
3162 | || (REG_P (operand) | |
3163 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) | |
5ec105cd | 3164 | #ifndef WORD_REGISTER_OPERATIONS |
b5068425 AK |
3165 | && (((GET_MODE_BITSIZE (GET_MODE (operand)) |
3166 | < BIGGEST_ALIGNMENT) | |
3167 | && (GET_MODE_SIZE (operand_mode[i]) | |
3168 | > GET_MODE_SIZE (GET_MODE (operand)))) | |
3169 | || BYTES_BIG_ENDIAN | |
03b72c86 | 3170 | #ifdef LOAD_EXTEND_OP |
b5068425 AK |
3171 | || (GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD |
3172 | && (GET_MODE_SIZE (GET_MODE (operand)) | |
3173 | <= UNITS_PER_WORD) | |
3174 | && (GET_MODE_SIZE (operand_mode[i]) | |
3175 | > GET_MODE_SIZE (GET_MODE (operand))) | |
3176 | && INTEGRAL_MODE_P (GET_MODE (operand)) | |
3177 | && LOAD_EXTEND_OP (GET_MODE (operand)) != UNKNOWN) | |
46da6b3a | 3178 | #endif |
b5068425 | 3179 | ) |
5ec105cd | 3180 | #endif |
b5068425 | 3181 | ) |
5ec105cd | 3182 | ) |
b5068425 AK |
3183 | force_reload = 1; |
3184 | } | |
eab89b90 | 3185 | |
b5068425 AK |
3186 | this_alternative[i] = NO_REGS; |
3187 | this_alternative_win[i] = 0; | |
3188 | this_alternative_match_win[i] = 0; | |
3189 | this_alternative_offmemok[i] = 0; | |
3190 | this_alternative_earlyclobber[i] = 0; | |
3191 | this_alternative_matches[i] = -1; | |
3192 | ||
3193 | /* An empty constraint or empty alternative | |
3194 | allows anything which matched the pattern. */ | |
3195 | if (*p == 0 || *p == ',') | |
3196 | win = 1, badop = 0; | |
3197 | ||
3198 | /* Scan this alternative's specs for this operand; | |
3199 | set WIN if the operand fits any letter in this alternative. | |
3200 | Otherwise, clear BADOP if this operand could | |
3201 | fit some letter after reloads, | |
3202 | or set WINREG if this operand could fit after reloads | |
3203 | provided the constraint allows some registers. */ | |
3204 | ||
3205 | do | |
3206 | switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c) | |
3207 | { | |
3208 | case '\0': | |
3209 | len = 0; | |
3210 | break; | |
3211 | case ',': | |
3212 | c = '\0'; | |
3213 | break; | |
97488870 | 3214 | |
b5068425 AK |
3215 | case '?': |
3216 | reject += 6; | |
3217 | break; | |
eab89b90 | 3218 | |
b5068425 AK |
3219 | case '!': |
3220 | reject = 600; | |
3221 | break; | |
eab89b90 | 3222 | |
b5068425 AK |
3223 | case '#': |
3224 | /* Ignore rest of this alternative as far as | |
3225 | reloading is concerned. */ | |
3226 | do | |
3227 | p++; | |
3228 | while (*p && *p != ','); | |
3229 | len = 0; | |
3230 | break; | |
eab89b90 | 3231 | |
b5068425 AK |
3232 | case '0': case '1': case '2': case '3': case '4': |
3233 | case '5': case '6': case '7': case '8': case '9': | |
3234 | m = strtoul (p, &end, 10); | |
3235 | p = end; | |
3236 | len = 0; | |
3237 | ||
3238 | this_alternative_matches[i] = m; | |
3239 | /* We are supposed to match a previous operand. | |
3240 | If we do, we win if that one did. | |
3241 | If we do not, count both of the operands as losers. | |
3242 | (This is too conservative, since most of the time | |
3243 | only a single reload insn will be needed to make | |
3244 | the two operands win. As a result, this alternative | |
3245 | may be rejected when it is actually desirable.) */ | |
3246 | if ((swapped && (m != commutative || i != commutative + 1)) | |
3247 | /* If we are matching as if two operands were swapped, | |
3248 | also pretend that operands_match had been computed | |
3249 | with swapped. | |
3250 | But if I is the second of those and C is the first, | |
3251 | don't exchange them, because operands_match is valid | |
3252 | only on one side of its diagonal. */ | |
3253 | ? (operands_match | |
3254 | [(m == commutative || m == commutative + 1) | |
3255 | ? 2 * commutative + 1 - m : m] | |
3256 | [(i == commutative || i == commutative + 1) | |
3257 | ? 2 * commutative + 1 - i : i]) | |
3258 | : operands_match[m][i]) | |
3259 | { | |
3260 | /* If we are matching a non-offsettable address where an | |
3261 | offsettable address was expected, then we must reject | |
3262 | this combination, because we can't reload it. */ | |
3263 | if (this_alternative_offmemok[m] | |
3264 | && MEM_P (recog_data.operand[m]) | |
3265 | && this_alternative[m] == NO_REGS | |
3266 | && ! this_alternative_win[m]) | |
3267 | bad = 1; | |
3268 | ||
3269 | did_match = this_alternative_win[m]; | |
3270 | } | |
87cda9d6 | 3271 | else |
b5068425 AK |
3272 | { |
3273 | /* Operands don't match. */ | |
3274 | rtx value; | |
3275 | int loc1, loc2; | |
3276 | /* Retroactively mark the operand we had to match | |
3277 | as a loser, if it wasn't already. */ | |
3278 | if (this_alternative_win[m]) | |
3279 | losers++; | |
3280 | this_alternative_win[m] = 0; | |
3281 | if (this_alternative[m] == NO_REGS) | |
3282 | bad = 1; | |
3283 | /* But count the pair only once in the total badness of | |
3284 | this alternative, if the pair can be a dummy reload. | |
3285 | The pointers in operand_loc are not swapped; swap | |
3286 | them by hand if necessary. */ | |
3287 | if (swapped && i == commutative) | |
3288 | loc1 = commutative + 1; | |
3289 | else if (swapped && i == commutative + 1) | |
3290 | loc1 = commutative; | |
3291 | else | |
3292 | loc1 = i; | |
3293 | if (swapped && m == commutative) | |
3294 | loc2 = commutative + 1; | |
3295 | else if (swapped && m == commutative + 1) | |
3296 | loc2 = commutative; | |
3297 | else | |
3298 | loc2 = m; | |
3299 | value | |
3300 | = find_dummy_reload (recog_data.operand[i], | |
3301 | recog_data.operand[m], | |
3302 | recog_data.operand_loc[loc1], | |
3303 | recog_data.operand_loc[loc2], | |
3304 | operand_mode[i], operand_mode[m], | |
3305 | this_alternative[m], -1, | |
3306 | this_alternative_earlyclobber[m]); | |
3307 | ||
3308 | if (value != 0) | |
3309 | losers--; | |
3310 | } | |
3311 | /* This can be fixed with reloads if the operand | |
3312 | we are supposed to match can be fixed with reloads. */ | |
3313 | badop = 0; | |
3314 | this_alternative[i] = this_alternative[m]; | |
3315 | ||
3316 | /* If we have to reload this operand and some previous | |
3317 | operand also had to match the same thing as this | |
3318 | operand, we don't know how to do that. So reject this | |
3319 | alternative. */ | |
3320 | if (! did_match || force_reload) | |
3321 | for (j = 0; j < i; j++) | |
3322 | if (this_alternative_matches[j] | |
3323 | == this_alternative_matches[i]) | |
c7b3b99f PCC |
3324 | { |
3325 | badop = 1; | |
3326 | break; | |
3327 | } | |
b5068425 | 3328 | break; |
eab89b90 | 3329 | |
b5068425 AK |
3330 | case 'p': |
3331 | /* All necessary reloads for an address_operand | |
3332 | were handled in find_reloads_address. */ | |
3333 | this_alternative[i] | |
3334 | = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, | |
3335 | ADDRESS, SCRATCH); | |
3336 | win = 1; | |
3337 | badop = 0; | |
3338 | break; | |
eab89b90 | 3339 | |
b5068425 AK |
3340 | case TARGET_MEM_CONSTRAINT: |
3341 | if (force_reload) | |
3342 | break; | |
3343 | if (MEM_P (operand) | |
3344 | || (REG_P (operand) | |
3345 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER | |
3346 | && reg_renumber[REGNO (operand)] < 0)) | |
3347 | win = 1; | |
3348 | if (CONST_POOL_OK_P (operand_mode[i], operand)) | |
3349 | badop = 0; | |
3350 | constmemok = 1; | |
3351 | break; | |
eab89b90 | 3352 | |
b5068425 AK |
3353 | case '<': |
3354 | if (MEM_P (operand) | |
3355 | && ! address_reloaded[i] | |
3356 | && (GET_CODE (XEXP (operand, 0)) == PRE_DEC | |
3357 | || GET_CODE (XEXP (operand, 0)) == POST_DEC)) | |
3358 | win = 1; | |
3359 | break; | |
eab89b90 | 3360 | |
b5068425 AK |
3361 | case '>': |
3362 | if (MEM_P (operand) | |
3363 | && ! address_reloaded[i] | |
3364 | && (GET_CODE (XEXP (operand, 0)) == PRE_INC | |
3365 | || GET_CODE (XEXP (operand, 0)) == POST_INC)) | |
3366 | win = 1; | |
3367 | break; | |
eab89b90 | 3368 | |
b5068425 AK |
3369 | /* Memory operand whose address is not offsettable. */ |
3370 | case 'V': | |
3371 | if (force_reload) | |
3372 | break; | |
3373 | if (MEM_P (operand) | |
3374 | && ! (ind_levels ? offsettable_memref_p (operand) | |
3375 | : offsettable_nonstrict_memref_p (operand)) | |
3376 | /* Certain mem addresses will become offsettable | |
3377 | after they themselves are reloaded. This is important; | |
3378 | we don't want our own handling of unoffsettables | |
3379 | to override the handling of reg_equiv_address. */ | |
3380 | && !(REG_P (XEXP (operand, 0)) | |
3381 | && (ind_levels == 0 | |
3382 | || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0))) | |
3383 | win = 1; | |
3384 | break; | |
eab89b90 | 3385 | |
b5068425 AK |
3386 | /* Memory operand whose address is offsettable. */ |
3387 | case 'o': | |
3388 | if (force_reload) | |
3389 | break; | |
3390 | if ((MEM_P (operand) | |
3391 | /* If IND_LEVELS, find_reloads_address won't reload a | |
3392 | pseudo that didn't get a hard reg, so we have to | |
3393 | reject that case. */ | |
3394 | && ((ind_levels ? offsettable_memref_p (operand) | |
3395 | : offsettable_nonstrict_memref_p (operand)) | |
3396 | /* A reloaded address is offsettable because it is now | |
3397 | just a simple register indirect. */ | |
3398 | || address_reloaded[i] == 1)) | |
3399 | || (REG_P (operand) | |
3400 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER | |
3401 | && reg_renumber[REGNO (operand)] < 0 | |
3402 | /* If reg_equiv_address is nonzero, we will be | |
3403 | loading it into a register; hence it will be | |
3404 | offsettable, but we cannot say that reg_equiv_mem | |
3405 | is offsettable without checking. */ | |
3406 | && ((reg_equiv_mem (REGNO (operand)) != 0 | |
3407 | && offsettable_memref_p (reg_equiv_mem (REGNO (operand)))) | |
3408 | || (reg_equiv_address (REGNO (operand)) != 0)))) | |
3409 | win = 1; | |
3410 | if (CONST_POOL_OK_P (operand_mode[i], operand) | |
3411 | || MEM_P (operand)) | |
3412 | badop = 0; | |
3413 | constmemok = 1; | |
3414 | offmemok = 1; | |
3415 | break; | |
eab89b90 | 3416 | |
b5068425 AK |
3417 | case '&': |
3418 | /* Output operand that is stored before the need for the | |
3419 | input operands (and their index registers) is over. */ | |
3420 | earlyclobber = 1, this_earlyclobber = 1; | |
3421 | break; | |
eab89b90 | 3422 | |
b5068425 AK |
3423 | case 'X': |
3424 | force_reload = 0; | |
3425 | win = 1; | |
3426 | break; | |
eab89b90 | 3427 | |
b5068425 AK |
3428 | case 'g': |
3429 | if (! force_reload | |
3430 | /* A PLUS is never a valid operand, but reload can make | |
3431 | it from a register when eliminating registers. */ | |
3432 | && GET_CODE (operand) != PLUS | |
3433 | /* A SCRATCH is not a valid operand. */ | |
3434 | && GET_CODE (operand) != SCRATCH | |
3435 | && (! CONSTANT_P (operand) | |
3436 | || ! flag_pic | |
3437 | || LEGITIMATE_PIC_OPERAND_P (operand)) | |
3438 | && (GENERAL_REGS == ALL_REGS | |
3439 | || !REG_P (operand) | |
3440 | || (REGNO (operand) >= FIRST_PSEUDO_REGISTER | |
3441 | && reg_renumber[REGNO (operand)] < 0))) | |
3442 | win = 1; | |
777e635f | 3443 | cl = GENERAL_REGS; |
b5068425 AK |
3444 | goto reg; |
3445 | ||
3446 | default: | |
777e635f RS |
3447 | cn = lookup_constraint (p); |
3448 | switch (get_constraint_type (cn)) | |
ccfc6cc8 | 3449 | { |
777e635f RS |
3450 | case CT_REGISTER: |
3451 | cl = reg_class_for_constraint (cn); | |
3452 | if (cl != NO_REGS) | |
3453 | goto reg; | |
3454 | break; | |
b5068425 | 3455 | |
d9c35eee RS |
3456 | case CT_CONST_INT: |
3457 | if (CONST_INT_P (operand) | |
3458 | && (insn_const_int_ok_for_constraint | |
3459 | (INTVAL (operand), cn))) | |
3460 | win = true; | |
3461 | break; | |
3462 | ||
777e635f RS |
3463 | case CT_MEMORY: |
3464 | if (force_reload) | |
3465 | break; | |
3466 | if (constraint_satisfied_p (operand, cn)) | |
3467 | win = 1; | |
3468 | /* If the address was already reloaded, | |
3469 | we win as well. */ | |
3470 | else if (MEM_P (operand) && address_reloaded[i] == 1) | |
3471 | win = 1; | |
3472 | /* Likewise if the address will be reloaded because | |
3473 | reg_equiv_address is nonzero. For reg_equiv_mem | |
3474 | we have to check. */ | |
3475 | else if (REG_P (operand) | |
3476 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER | |
3477 | && reg_renumber[REGNO (operand)] < 0 | |
3478 | && ((reg_equiv_mem (REGNO (operand)) != 0 | |
3479 | && (constraint_satisfied_p | |
3480 | (reg_equiv_mem (REGNO (operand)), | |
3481 | cn))) | |
3482 | || (reg_equiv_address (REGNO (operand)) | |
3483 | != 0))) | |
3484 | win = 1; | |
3485 | ||
3486 | /* If we didn't already win, we can reload | |
3487 | constants via force_const_mem, and other | |
3488 | MEMs by reloading the address like for 'o'. */ | |
3489 | if (CONST_POOL_OK_P (operand_mode[i], operand) | |
3490 | || MEM_P (operand)) | |
3491 | badop = 0; | |
3492 | constmemok = 1; | |
3493 | offmemok = 1; | |
3494 | break; | |
3495 | ||
3496 | case CT_ADDRESS: | |
3497 | if (constraint_satisfied_p (operand, cn)) | |
3498 | win = 1; | |
3499 | ||
3500 | /* If we didn't already win, we can reload | |
3501 | the address into a base register. */ | |
3502 | this_alternative[i] | |
3503 | = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC, | |
3504 | ADDRESS, SCRATCH); | |
3505 | badop = 0; | |
3506 | break; | |
3507 | ||
3508 | case CT_FIXED_FORM: | |
3509 | if (constraint_satisfied_p (operand, cn)) | |
b5068425 | 3510 | win = 1; |
ccfc6cc8 UW |
3511 | break; |
3512 | } | |
777e635f | 3513 | break; |
ccfc6cc8 | 3514 | |
b5068425 | 3515 | reg: |
777e635f RS |
3516 | this_alternative[i] |
3517 | = reg_class_subunion[this_alternative[i]][cl]; | |
b5068425 AK |
3518 | if (GET_MODE (operand) == BLKmode) |
3519 | break; | |
3520 | winreg = 1; | |
3521 | if (REG_P (operand) | |
3522 | && reg_fits_class_p (operand, this_alternative[i], | |
3523 | offset, GET_MODE (recog_data.operand[i]))) | |
c2cba7a9 | 3524 | win = 1; |
c2cba7a9 RH |
3525 | break; |
3526 | } | |
b5068425 | 3527 | while ((p += len), c); |
05d10675 | 3528 | |
b5068425 AK |
3529 | if (swapped == (commutative >= 0 ? 1 : 0)) |
3530 | constraints[i] = p; | |
3a322c50 | 3531 | |
b5068425 AK |
3532 | /* If this operand could be handled with a reg, |
3533 | and some reg is allowed, then this operand can be handled. */ | |
3534 | if (winreg && this_alternative[i] != NO_REGS | |
3535 | && (win || !class_only_fixed_regs[this_alternative[i]])) | |
3536 | badop = 0; | |
3537 | ||
3538 | /* Record which operands fit this alternative. */ | |
3539 | this_alternative_earlyclobber[i] = earlyclobber; | |
3540 | if (win && ! force_reload) | |
3541 | this_alternative_win[i] = 1; | |
3542 | else if (did_match && ! force_reload) | |
3543 | this_alternative_match_win[i] = 1; | |
3544 | else | |
b5c82fa1 | 3545 | { |
b5068425 AK |
3546 | int const_to_mem = 0; |
3547 | ||
3548 | this_alternative_offmemok[i] = offmemok; | |
3549 | losers++; | |
3550 | if (badop) | |
3551 | bad = 1; | |
3552 | /* Alternative loses if it has no regs for a reg operand. */ | |
3553 | if (REG_P (operand) | |
3554 | && this_alternative[i] == NO_REGS | |
3555 | && this_alternative_matches[i] < 0) | |
3556 | bad = 1; | |
3557 | ||
3558 | /* If this is a constant that is reloaded into the desired | |
3559 | class by copying it to memory first, count that as another | |
3560 | reload. This is consistent with other code and is | |
3561 | required to avoid choosing another alternative when | |
3562 | the constant is moved into memory by this function on | |
3563 | an early reload pass. Note that the test here is | |
3564 | precisely the same as in the code below that calls | |
3565 | force_const_mem. */ | |
3566 | if (CONST_POOL_OK_P (operand_mode[i], operand) | |
3567 | && ((targetm.preferred_reload_class (operand, | |
3568 | this_alternative[i]) | |
3569 | == NO_REGS) | |
3570 | || no_input_reloads)) | |
3571 | { | |
3572 | const_to_mem = 1; | |
3573 | if (this_alternative[i] != NO_REGS) | |
3574 | losers++; | |
3575 | } | |
b5c82fa1 | 3576 | |
b5068425 AK |
3577 | /* Alternative loses if it requires a type of reload not |
3578 | permitted for this insn. We can always reload SCRATCH | |
3579 | and objects with a REG_UNUSED note. */ | |
3580 | if (GET_CODE (operand) != SCRATCH | |
3581 | && modified[i] != RELOAD_READ && no_output_reloads | |
3582 | && ! find_reg_note (insn, REG_UNUSED, operand)) | |
3583 | bad = 1; | |
3584 | else if (modified[i] != RELOAD_WRITE && no_input_reloads | |
3585 | && ! const_to_mem) | |
3586 | bad = 1; | |
3587 | ||
3588 | /* If we can't reload this value at all, reject this | |
3589 | alternative. Note that we could also lose due to | |
3590 | LIMIT_RELOAD_CLASS, but we don't check that | |
3591 | here. */ | |
3592 | ||
3593 | if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS) | |
3594 | { | |
3595 | if (targetm.preferred_reload_class (operand, | |
3596 | this_alternative[i]) | |
3597 | == NO_REGS) | |
3598 | reject = 600; | |
3599 | ||
3600 | if (operand_type[i] == RELOAD_FOR_OUTPUT | |
3601 | && (targetm.preferred_output_reload_class (operand, | |
3602 | this_alternative[i]) | |
3603 | == NO_REGS)) | |
3604 | reject = 600; | |
3605 | } | |
b5c82fa1 | 3606 | |
b5068425 AK |
3607 | /* We prefer to reload pseudos over reloading other things, |
3608 | since such reloads may be able to be eliminated later. | |
3609 | If we are reloading a SCRATCH, we won't be generating any | |
3610 | insns, just using a register, so it is also preferred. | |
3611 | So bump REJECT in other cases. Don't do this in the | |
3612 | case where we are forcing a constant into memory and | |
3613 | it will then win since we don't want to have a different | |
3614 | alternative match then. */ | |
3615 | if (! (REG_P (operand) | |
3616 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER) | |
3617 | && GET_CODE (operand) != SCRATCH | |
3618 | && ! (const_to_mem && constmemok)) | |
3619 | reject += 2; | |
3620 | ||
3621 | /* Input reloads can be inherited more often than output | |
3622 | reloads can be removed, so penalize output reloads. */ | |
3623 | if (operand_type[i] != RELOAD_FOR_INPUT | |
3624 | && GET_CODE (operand) != SCRATCH) | |
3625 | reject++; | |
3626 | } | |
eab89b90 | 3627 | |
b5068425 AK |
3628 | /* If this operand is a pseudo register that didn't get |
3629 | a hard reg and this alternative accepts some | |
3630 | register, see if the class that we want is a subset | |
3631 | of the preferred class for this register. If not, | |
3632 | but it intersects that class, use the preferred class | |
3633 | instead. If it does not intersect the preferred | |
3634 | class, show that usage of this alternative should be | |
3635 | discouraged; it will be discouraged more still if the | |
3636 | register is `preferred or nothing'. We do this | |
3637 | because it increases the chance of reusing our spill | |
3638 | register in a later insn and avoiding a pair of | |
3639 | memory stores and loads. | |
3640 | ||
3641 | Don't bother with this if this alternative will | |
3642 | accept this operand. | |
3643 | ||
3644 | Don't do this for a multiword operand, since it is | |
3645 | only a small win and has the risk of requiring more | |
3646 | spill registers, which could cause a large loss. | |
3647 | ||
3648 | Don't do this if the preferred class has only one | |
3649 | register because we might otherwise exhaust the | |
3650 | class. */ | |
3651 | ||
3652 | if (! win && ! did_match | |
3653 | && this_alternative[i] != NO_REGS | |
3654 | && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD | |
3655 | && reg_class_size [(int) preferred_class[i]] > 0 | |
3656 | && ! small_register_class_p (preferred_class[i])) | |
eab89b90 | 3657 | { |
b5068425 AK |
3658 | if (! reg_class_subset_p (this_alternative[i], |
3659 | preferred_class[i])) | |
3660 | { | |
3661 | /* Since we don't have a way of forming the intersection, | |
3662 | we just do something special if the preferred class | |
3663 | is a subset of the class we have; that's the most | |
3664 | common case anyway. */ | |
3665 | if (reg_class_subset_p (preferred_class[i], | |
3666 | this_alternative[i])) | |
3667 | this_alternative[i] = preferred_class[i]; | |
3668 | else | |
3669 | reject += (2 + 2 * pref_or_nothing[i]); | |
3670 | } | |
eab89b90 RK |
3671 | } |
3672 | } | |
eab89b90 | 3673 | |
b5068425 AK |
3674 | /* Now see if any output operands that are marked "earlyclobber" |
3675 | in this alternative conflict with any input operands | |
3676 | or any memory addresses. */ | |
eab89b90 | 3677 | |
b5068425 AK |
3678 | for (i = 0; i < noperands; i++) |
3679 | if (this_alternative_earlyclobber[i] | |
3680 | && (this_alternative_win[i] || this_alternative_match_win[i])) | |
3681 | { | |
3682 | struct decomposition early_data; | |
eab89b90 | 3683 | |
b5068425 | 3684 | early_data = decompose (recog_data.operand[i]); |
eab89b90 | 3685 | |
b5068425 | 3686 | gcc_assert (modified[i] != RELOAD_READ); |
05d10675 | 3687 | |
b5068425 AK |
3688 | if (this_alternative[i] == NO_REGS) |
3689 | { | |
3690 | this_alternative_earlyclobber[i] = 0; | |
3691 | gcc_assert (this_insn_is_asm); | |
3692 | error_for_asm (this_insn, | |
3693 | "%<&%> constraint used with no register class"); | |
3694 | } | |
eab89b90 | 3695 | |
eab89b90 | 3696 | for (j = 0; j < noperands; j++) |
b5068425 AK |
3697 | /* Is this an input operand or a memory ref? */ |
3698 | if ((MEM_P (recog_data.operand[j]) | |
3699 | || modified[j] != RELOAD_WRITE) | |
3700 | && j != i | |
3701 | /* Ignore things like match_operator operands. */ | |
3702 | && !recog_data.is_operator[j] | |
3703 | /* Don't count an input operand that is constrained to match | |
3704 | the early clobber operand. */ | |
3705 | && ! (this_alternative_matches[j] == i | |
3706 | && rtx_equal_p (recog_data.operand[i], | |
3707 | recog_data.operand[j])) | |
3708 | /* Is it altered by storing the earlyclobber operand? */ | |
3709 | && !immune_p (recog_data.operand[j], recog_data.operand[i], | |
3710 | early_data)) | |
eab89b90 | 3711 | { |
b5068425 AK |
3712 | /* If the output is in a non-empty few-regs class, |
3713 | it's costly to reload it, so reload the input instead. */ | |
3714 | if (small_register_class_p (this_alternative[i]) | |
3715 | && (REG_P (recog_data.operand[j]) | |
3716 | || GET_CODE (recog_data.operand[j]) == SUBREG)) | |
3717 | { | |
3718 | losers++; | |
3719 | this_alternative_win[j] = 0; | |
3720 | this_alternative_match_win[j] = 0; | |
3721 | } | |
3722 | else | |
3723 | break; | |
eab89b90 | 3724 | } |
b5068425 AK |
3725 | /* If an earlyclobber operand conflicts with something, |
3726 | it must be reloaded, so request this and count the cost. */ | |
3727 | if (j != noperands) | |
3728 | { | |
3729 | losers++; | |
3730 | this_alternative_win[i] = 0; | |
3731 | this_alternative_match_win[j] = 0; | |
3732 | for (j = 0; j < noperands; j++) | |
3733 | if (this_alternative_matches[j] == i | |
3734 | && this_alternative_match_win[j]) | |
3735 | { | |
3736 | this_alternative_win[j] = 0; | |
3737 | this_alternative_match_win[j] = 0; | |
3738 | losers++; | |
3739 | } | |
3740 | } | |
eab89b90 | 3741 | } |
eab89b90 | 3742 | |
b5068425 AK |
3743 | /* If one alternative accepts all the operands, no reload required, |
3744 | choose that alternative; don't consider the remaining ones. */ | |
3745 | if (losers == 0) | |
ce18efcb | 3746 | { |
b5068425 AK |
3747 | /* Unswap these so that they are never swapped at `finish'. */ |
3748 | if (swapped) | |
3749 | { | |
3750 | recog_data.operand[commutative] = substed_operand[commutative]; | |
3751 | recog_data.operand[commutative + 1] | |
3752 | = substed_operand[commutative + 1]; | |
3753 | } | |
ce18efcb VM |
3754 | for (i = 0; i < noperands; i++) |
3755 | { | |
ce18efcb | 3756 | goal_alternative_win[i] = this_alternative_win[i]; |
b5068425 AK |
3757 | goal_alternative_match_win[i] = this_alternative_match_win[i]; |
3758 | goal_alternative[i] = this_alternative[i]; | |
3759 | goal_alternative_offmemok[i] = this_alternative_offmemok[i]; | |
ce18efcb VM |
3760 | goal_alternative_matches[i] = this_alternative_matches[i]; |
3761 | goal_alternative_earlyclobber[i] | |
3762 | = this_alternative_earlyclobber[i]; | |
3763 | } | |
ce18efcb | 3764 | goal_alternative_number = this_alternative_number; |
b5068425 | 3765 | goal_alternative_swapped = swapped; |
ce18efcb | 3766 | goal_earlyclobber = this_earlyclobber; |
b5068425 | 3767 | goto finish; |
eab89b90 | 3768 | } |
eab89b90 | 3769 | |
b5068425 AK |
3770 | /* REJECT, set by the ! and ? constraint characters and when a register |
3771 | would be reloaded into a non-preferred class, discourages the use of | |
3772 | this alternative for a reload goal. REJECT is incremented by six | |
3773 | for each ? and two for each non-preferred class. */ | |
3774 | losers = losers * 6 + reject; | |
eab89b90 | 3775 | |
b5068425 AK |
3776 | /* If this alternative can be made to work by reloading, |
3777 | and it needs less reloading than the others checked so far, | |
3778 | record it as the chosen goal for reloading. */ | |
3779 | if (! bad) | |
3780 | { | |
3781 | if (best > losers) | |
3782 | { | |
3783 | for (i = 0; i < noperands; i++) | |
3784 | { | |
3785 | goal_alternative[i] = this_alternative[i]; | |
3786 | goal_alternative_win[i] = this_alternative_win[i]; | |
3787 | goal_alternative_match_win[i] | |
3788 | = this_alternative_match_win[i]; | |
3789 | goal_alternative_offmemok[i] | |
3790 | = this_alternative_offmemok[i]; | |
3791 | goal_alternative_matches[i] = this_alternative_matches[i]; | |
3792 | goal_alternative_earlyclobber[i] | |
3793 | = this_alternative_earlyclobber[i]; | |
3794 | } | |
3795 | goal_alternative_swapped = swapped; | |
3796 | best = losers; | |
3797 | goal_alternative_number = this_alternative_number; | |
3798 | goal_earlyclobber = this_earlyclobber; | |
3799 | } | |
3800 | } | |
eab89b90 | 3801 | |
b5068425 AK |
3802 | if (swapped) |
3803 | { | |
3804 | enum reg_class tclass; | |
3805 | int t; | |
3806 | ||
3807 | /* If the commutative operands have been swapped, swap | |
3808 | them back in order to check the next alternative. */ | |
3809 | recog_data.operand[commutative] = substed_operand[commutative]; | |
3810 | recog_data.operand[commutative + 1] = substed_operand[commutative + 1]; | |
3811 | /* Unswap the duplicates too. */ | |
3812 | for (i = 0; i < recog_data.n_dups; i++) | |
3813 | if (recog_data.dup_num[i] == commutative | |
3814 | || recog_data.dup_num[i] == commutative + 1) | |
3815 | *recog_data.dup_loc[i] | |
3816 | = recog_data.operand[(int) recog_data.dup_num[i]]; | |
3817 | ||
3818 | /* Unswap the operand related information as well. */ | |
3819 | tclass = preferred_class[commutative]; | |
3820 | preferred_class[commutative] = preferred_class[commutative + 1]; | |
3821 | preferred_class[commutative + 1] = tclass; | |
3822 | ||
3823 | t = pref_or_nothing[commutative]; | |
3824 | pref_or_nothing[commutative] = pref_or_nothing[commutative + 1]; | |
3825 | pref_or_nothing[commutative + 1] = t; | |
3826 | ||
3827 | t = address_reloaded[commutative]; | |
3828 | address_reloaded[commutative] = address_reloaded[commutative + 1]; | |
3829 | address_reloaded[commutative + 1] = t; | |
3830 | } | |
eab89b90 RK |
3831 | } |
3832 | } | |
3833 | ||
3834 | /* The operands don't meet the constraints. | |
3835 | goal_alternative describes the alternative | |
3836 | that we could reach by reloading the fewest operands. | |
3837 | Reload so as to fit it. */ | |
3838 | ||
c22eaf8a | 3839 | if (best == MAX_RECOG_OPERANDS * 2 + 600) |
eab89b90 RK |
3840 | { |
3841 | /* No alternative works with reloads?? */ | |
3842 | if (insn_code_number >= 0) | |
1f978f5f | 3843 | fatal_insn ("unable to generate reloads for:", insn); |
971801ff | 3844 | error_for_asm (insn, "inconsistent operand constraints in an %<asm%>"); |
eab89b90 | 3845 | /* Avoid further trouble with this insn. */ |
38a448ca | 3846 | PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx); |
eab89b90 | 3847 | n_reloads = 0; |
cb2afeb3 | 3848 | return 0; |
eab89b90 RK |
3849 | } |
3850 | ||
3851 | /* Jump to `finish' from above if all operands are valid already. | |
3852 | In that case, goal_alternative_win is all 1. */ | |
3853 | finish: | |
3854 | ||
3855 | /* Right now, for any pair of operands I and J that are required to match, | |
3856 | with I < J, | |
3857 | goal_alternative_matches[J] is I. | |
3858 | Set up goal_alternative_matched as the inverse function: | |
3859 | goal_alternative_matched[I] = J. */ | |
3860 | ||
3861 | for (i = 0; i < noperands; i++) | |
3862 | goal_alternative_matched[i] = -1; | |
a6a2274a | 3863 | |
eab89b90 RK |
3864 | for (i = 0; i < noperands; i++) |
3865 | if (! goal_alternative_win[i] | |
3866 | && goal_alternative_matches[i] >= 0) | |
3867 | goal_alternative_matched[goal_alternative_matches[i]] = i; | |
3868 | ||
69add2a8 BS |
3869 | for (i = 0; i < noperands; i++) |
3870 | goal_alternative_win[i] |= goal_alternative_match_win[i]; | |
3871 | ||
eab89b90 | 3872 | /* If the best alternative is with operands 1 and 2 swapped, |
a8c9daeb RK |
3873 | consider them swapped before reporting the reloads. Update the |
3874 | operand numbers of any reloads already pushed. */ | |
eab89b90 RK |
3875 | |
3876 | if (goal_alternative_swapped) | |
3877 | { | |
b3694847 | 3878 | rtx tem; |
eab89b90 RK |
3879 | |
3880 | tem = substed_operand[commutative]; | |
3881 | substed_operand[commutative] = substed_operand[commutative + 1]; | |
3882 | substed_operand[commutative + 1] = tem; | |
1ccbefce RH |
3883 | tem = recog_data.operand[commutative]; |
3884 | recog_data.operand[commutative] = recog_data.operand[commutative + 1]; | |
3885 | recog_data.operand[commutative + 1] = tem; | |
3886 | tem = *recog_data.operand_loc[commutative]; | |
3887 | *recog_data.operand_loc[commutative] | |
3888 | = *recog_data.operand_loc[commutative + 1]; | |
4381f7c2 | 3889 | *recog_data.operand_loc[commutative + 1] = tem; |
a8c9daeb RK |
3890 | |
3891 | for (i = 0; i < n_reloads; i++) | |
3892 | { | |
eceef4c9 BS |
3893 | if (rld[i].opnum == commutative) |
3894 | rld[i].opnum = commutative + 1; | |
3895 | else if (rld[i].opnum == commutative + 1) | |
3896 | rld[i].opnum = commutative; | |
a8c9daeb | 3897 | } |
eab89b90 RK |
3898 | } |
3899 | ||
eab89b90 RK |
3900 | for (i = 0; i < noperands; i++) |
3901 | { | |
eab89b90 | 3902 | operand_reloadnum[i] = -1; |
a8c9daeb RK |
3903 | |
3904 | /* If this is an earlyclobber operand, we need to widen the scope. | |
3905 | The reload must remain valid from the start of the insn being | |
3906 | reloaded until after the operand is stored into its destination. | |
3907 | We approximate this with RELOAD_OTHER even though we know that we | |
3908 | do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads. | |
3909 | ||
3910 | One special case that is worth checking is when we have an | |
3911 | output that is earlyclobber but isn't used past the insn (typically | |
05d10675 | 3912 | a SCRATCH). In this case, we only need have the reload live |
a8c9daeb | 3913 | through the insn itself, but not for any of our input or output |
05d10675 | 3914 | reloads. |
f9df0a1d R |
3915 | But we must not accidentally narrow the scope of an existing |
3916 | RELOAD_OTHER reload - leave these alone. | |
a8c9daeb RK |
3917 | |
3918 | In any case, anything needed to address this operand can remain | |
3919 | however they were previously categorized. */ | |
3920 | ||
f9df0a1d | 3921 | if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER) |
a8c9daeb | 3922 | operand_type[i] |
1ccbefce | 3923 | = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i]) |
a8c9daeb | 3924 | ? RELOAD_FOR_INSN : RELOAD_OTHER); |
eab89b90 RK |
3925 | } |
3926 | ||
3927 | /* Any constants that aren't allowed and can't be reloaded | |
3928 | into registers are here changed into memory references. */ | |
3929 | for (i = 0; i < noperands; i++) | |
e0a17959 | 3930 | if (! goal_alternative_win[i]) |
eab89b90 | 3931 | { |
e0a17959 AK |
3932 | rtx op = recog_data.operand[i]; |
3933 | rtx subreg = NULL_RTX; | |
3934 | rtx plus = NULL_RTX; | |
ef4bddc2 | 3935 | machine_mode mode = operand_mode[i]; |
e0a17959 AK |
3936 | |
3937 | /* Reloads of SUBREGs of CONSTANT RTXs are handled later in | |
3938 | push_reload so we have to let them pass here. */ | |
3939 | if (GET_CODE (op) == SUBREG) | |
3940 | { | |
3941 | subreg = op; | |
3942 | op = SUBREG_REG (op); | |
3943 | mode = GET_MODE (op); | |
3944 | } | |
1f7f6676 | 3945 | |
e0a17959 AK |
3946 | if (GET_CODE (op) == PLUS) |
3947 | { | |
3948 | plus = op; | |
3949 | op = XEXP (op, 1); | |
3950 | } | |
eab89b90 | 3951 | |
fbbf66e7 | 3952 | if (CONST_POOL_OK_P (mode, op) |
fba42e24 | 3953 | && ((targetm.preferred_reload_class (op, goal_alternative[i]) |
e0a17959 | 3954 | == NO_REGS) |
fbbf66e7 | 3955 | || no_input_reloads)) |
e0a17959 AK |
3956 | { |
3957 | int this_address_reloaded; | |
3958 | rtx tem = force_const_mem (mode, op); | |
d58005c7 | 3959 | |
e0a17959 AK |
3960 | /* If we stripped a SUBREG or a PLUS above add it back. */ |
3961 | if (plus != NULL_RTX) | |
3962 | tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem); | |
3963 | ||
3964 | if (subreg != NULL_RTX) | |
3965 | tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg)); | |
3966 | ||
3967 | this_address_reloaded = 0; | |
3968 | substed_operand[i] = recog_data.operand[i] | |
3969 | = find_reloads_toplev (tem, i, address_type[i], ind_levels, | |
3970 | 0, insn, &this_address_reloaded); | |
3971 | ||
3972 | /* If the alternative accepts constant pool refs directly | |
3973 | there will be no reload needed at all. */ | |
3974 | if (plus == NULL_RTX | |
3975 | && subreg == NULL_RTX | |
3976 | && alternative_allows_const_pool_ref (this_address_reloaded == 0 | |
3977 | ? substed_operand[i] | |
3978 | : NULL, | |
3979 | recog_data.constraints[i], | |
3980 | goal_alternative_number)) | |
3981 | goal_alternative_win[i] = 1; | |
3982 | } | |
d58005c7 UW |
3983 | } |
3984 | ||
4644aad4 RK |
3985 | /* Record the values of the earlyclobber operands for the caller. */ |
3986 | if (goal_earlyclobber) | |
3987 | for (i = 0; i < noperands; i++) | |
3988 | if (goal_alternative_earlyclobber[i]) | |
1ccbefce | 3989 | reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i]; |
4644aad4 | 3990 | |
eab89b90 RK |
3991 | /* Now record reloads for all the operands that need them. */ |
3992 | for (i = 0; i < noperands; i++) | |
3993 | if (! goal_alternative_win[i]) | |
3994 | { | |
3995 | /* Operands that match previous ones have already been handled. */ | |
3996 | if (goal_alternative_matches[i] >= 0) | |
3997 | ; | |
3998 | /* Handle an operand with a nonoffsettable address | |
3999 | appearing where an offsettable address will do | |
3a322c50 RK |
4000 | by reloading the address into a base register. |
4001 | ||
4002 | ??? We can also do this when the operand is a register and | |
4003 | reg_equiv_mem is not offsettable, but this is a bit tricky, | |
4004 | so we don't bother with it. It may not be worth doing. */ | |
eab89b90 RK |
4005 | else if (goal_alternative_matched[i] == -1 |
4006 | && goal_alternative_offmemok[i] | |
3c0cb5de | 4007 | && MEM_P (recog_data.operand[i])) |
eab89b90 | 4008 | { |
7c7ce73a | 4009 | /* If the address to be reloaded is a VOIDmode constant, |
d4ebfa65 BE |
4010 | use the default address mode as mode of the reload register, |
4011 | as would have been done by find_reloads_address. */ | |
86fc3d06 | 4012 | addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]); |
ef4bddc2 | 4013 | machine_mode address_mode; |
7c7ce73a | 4014 | |
372d6395 | 4015 | address_mode = get_address_mode (recog_data.operand[i]); |
eab89b90 | 4016 | operand_reloadnum[i] |
1ccbefce | 4017 | = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX, |
f4f4d0f8 | 4018 | &XEXP (recog_data.operand[i], 0), (rtx*) 0, |
86fc3d06 | 4019 | base_reg_class (VOIDmode, as, MEM, SCRATCH), |
7c7ce73a | 4020 | address_mode, |
a8c9daeb | 4021 | VOIDmode, 0, 0, i, RELOAD_FOR_INPUT); |
eceef4c9 | 4022 | rld[operand_reloadnum[i]].inc |
1ccbefce | 4023 | = GET_MODE_SIZE (GET_MODE (recog_data.operand[i])); |
a8c9daeb RK |
4024 | |
4025 | /* If this operand is an output, we will have made any | |
4026 | reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but | |
4027 | now we are treating part of the operand as an input, so | |
4028 | we must change these to RELOAD_FOR_INPUT_ADDRESS. */ | |
4029 | ||
2d55b7e8 | 4030 | if (modified[i] == RELOAD_WRITE) |
47c8cf91 ILT |
4031 | { |
4032 | for (j = 0; j < n_reloads; j++) | |
4033 | { | |
eceef4c9 | 4034 | if (rld[j].opnum == i) |
47c8cf91 | 4035 | { |
eceef4c9 BS |
4036 | if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS) |
4037 | rld[j].when_needed = RELOAD_FOR_INPUT_ADDRESS; | |
4038 | else if (rld[j].when_needed | |
47c8cf91 | 4039 | == RELOAD_FOR_OUTADDR_ADDRESS) |
eceef4c9 | 4040 | rld[j].when_needed = RELOAD_FOR_INPADDR_ADDRESS; |
47c8cf91 ILT |
4041 | } |
4042 | } | |
4043 | } | |
eab89b90 RK |
4044 | } |
4045 | else if (goal_alternative_matched[i] == -1) | |
9ec36da5 JL |
4046 | { |
4047 | operand_reloadnum[i] | |
4048 | = push_reload ((modified[i] != RELOAD_WRITE | |
1ccbefce RH |
4049 | ? recog_data.operand[i] : 0), |
4050 | (modified[i] != RELOAD_READ | |
4051 | ? recog_data.operand[i] : 0), | |
9ec36da5 | 4052 | (modified[i] != RELOAD_WRITE |
1ccbefce | 4053 | ? recog_data.operand_loc[i] : 0), |
9ec36da5 | 4054 | (modified[i] != RELOAD_READ |
1ccbefce | 4055 | ? recog_data.operand_loc[i] : 0), |
9ec36da5 JL |
4056 | (enum reg_class) goal_alternative[i], |
4057 | (modified[i] == RELOAD_WRITE | |
4058 | ? VOIDmode : operand_mode[i]), | |
4059 | (modified[i] == RELOAD_READ | |
4060 | ? VOIDmode : operand_mode[i]), | |
4061 | (insn_code_number < 0 ? 0 | |
a995e389 | 4062 | : insn_data[insn_code_number].operand[i].strict_low), |
9ec36da5 | 4063 | 0, i, operand_type[i]); |
9ec36da5 | 4064 | } |
eab89b90 RK |
4065 | /* In a matching pair of operands, one must be input only |
4066 | and the other must be output only. | |
4067 | Pass the input operand as IN and the other as OUT. */ | |
4068 | else if (modified[i] == RELOAD_READ | |
4069 | && modified[goal_alternative_matched[i]] == RELOAD_WRITE) | |
4070 | { | |
4071 | operand_reloadnum[i] | |
1ccbefce RH |
4072 | = push_reload (recog_data.operand[i], |
4073 | recog_data.operand[goal_alternative_matched[i]], | |
4074 | recog_data.operand_loc[i], | |
4075 | recog_data.operand_loc[goal_alternative_matched[i]], | |
eab89b90 RK |
4076 | (enum reg_class) goal_alternative[i], |
4077 | operand_mode[i], | |
4078 | operand_mode[goal_alternative_matched[i]], | |
a8c9daeb | 4079 | 0, 0, i, RELOAD_OTHER); |
eab89b90 RK |
4080 | operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum; |
4081 | } | |
4082 | else if (modified[i] == RELOAD_WRITE | |
4083 | && modified[goal_alternative_matched[i]] == RELOAD_READ) | |
4084 | { | |
4085 | operand_reloadnum[goal_alternative_matched[i]] | |
1ccbefce RH |
4086 | = push_reload (recog_data.operand[goal_alternative_matched[i]], |
4087 | recog_data.operand[i], | |
4088 | recog_data.operand_loc[goal_alternative_matched[i]], | |
4089 | recog_data.operand_loc[i], | |
eab89b90 RK |
4090 | (enum reg_class) goal_alternative[i], |
4091 | operand_mode[goal_alternative_matched[i]], | |
4092 | operand_mode[i], | |
a8c9daeb | 4093 | 0, 0, i, RELOAD_OTHER); |
eab89b90 RK |
4094 | operand_reloadnum[i] = output_reloadnum; |
4095 | } | |
eab89b90 RK |
4096 | else |
4097 | { | |
41374e13 | 4098 | gcc_assert (insn_code_number < 0); |
971801ff JM |
4099 | error_for_asm (insn, "inconsistent operand constraints " |
4100 | "in an %<asm%>"); | |
eab89b90 | 4101 | /* Avoid further trouble with this insn. */ |
38a448ca | 4102 | PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx); |
eab89b90 | 4103 | n_reloads = 0; |
cb2afeb3 | 4104 | return 0; |
eab89b90 RK |
4105 | } |
4106 | } | |
4107 | else if (goal_alternative_matched[i] < 0 | |
112f7223 | 4108 | && goal_alternative_matches[i] < 0 |
0b540f12 | 4109 | && address_operand_reloaded[i] != 1 |
112f7223 | 4110 | && optimize) |
eab89b90 | 4111 | { |
05d10675 | 4112 | /* For each non-matching operand that's a MEM or a pseudo-register |
eab89b90 RK |
4113 | that didn't get a hard register, make an optional reload. |
4114 | This may get done even if the insn needs no reloads otherwise. */ | |
a8c9daeb | 4115 | |
1ccbefce | 4116 | rtx operand = recog_data.operand[i]; |
a8c9daeb | 4117 | |
eab89b90 | 4118 | while (GET_CODE (operand) == SUBREG) |
ddef6bc7 | 4119 | operand = SUBREG_REG (operand); |
3c0cb5de | 4120 | if ((MEM_P (operand) |
f8cfc6aa | 4121 | || (REG_P (operand) |
a8c9daeb | 4122 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) |
cb2afeb3 R |
4123 | /* If this is only for an output, the optional reload would not |
4124 | actually cause us to use a register now, just note that | |
4125 | something is stored here. */ | |
fba42e24 | 4126 | && (goal_alternative[i] != NO_REGS |
cb2afeb3 | 4127 | || modified[i] == RELOAD_WRITE) |
a8c9daeb | 4128 | && ! no_input_reloads |
cb2afeb3 R |
4129 | /* An optional output reload might allow to delete INSN later. |
4130 | We mustn't make in-out reloads on insns that are not permitted | |
4131 | output reloads. | |
4132 | If this is an asm, we can't delete it; we must not even call | |
4133 | push_reload for an optional output reload in this case, | |
4134 | because we can't be sure that the constraint allows a register, | |
4135 | and push_reload verifies the constraints for asms. */ | |
eab89b90 | 4136 | && (modified[i] == RELOAD_READ |
cb2afeb3 | 4137 | || (! no_output_reloads && ! this_insn_is_asm))) |
eab89b90 | 4138 | operand_reloadnum[i] |
1ccbefce RH |
4139 | = push_reload ((modified[i] != RELOAD_WRITE |
4140 | ? recog_data.operand[i] : 0), | |
4141 | (modified[i] != RELOAD_READ | |
4142 | ? recog_data.operand[i] : 0), | |
a8c9daeb | 4143 | (modified[i] != RELOAD_WRITE |
1ccbefce | 4144 | ? recog_data.operand_loc[i] : 0), |
a8c9daeb | 4145 | (modified[i] != RELOAD_READ |
1ccbefce | 4146 | ? recog_data.operand_loc[i] : 0), |
eab89b90 | 4147 | (enum reg_class) goal_alternative[i], |
a8c9daeb RK |
4148 | (modified[i] == RELOAD_WRITE |
4149 | ? VOIDmode : operand_mode[i]), | |
4150 | (modified[i] == RELOAD_READ | |
4151 | ? VOIDmode : operand_mode[i]), | |
eab89b90 | 4152 | (insn_code_number < 0 ? 0 |
a995e389 | 4153 | : insn_data[insn_code_number].operand[i].strict_low), |
a8c9daeb | 4154 | 1, i, operand_type[i]); |
87afbee6 JL |
4155 | /* If a memory reference remains (either as a MEM or a pseudo that |
4156 | did not get a hard register), yet we can't make an optional | |
cb2afeb3 R |
4157 | reload, check if this is actually a pseudo register reference; |
4158 | we then need to emit a USE and/or a CLOBBER so that reload | |
4159 | inheritance will do the right thing. */ | |
112f7223 | 4160 | else if (replace |
3c0cb5de | 4161 | && (MEM_P (operand) |
f8cfc6aa | 4162 | || (REG_P (operand) |
112f7223 UW |
4163 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER |
4164 | && reg_renumber [REGNO (operand)] < 0))) | |
cb2afeb3 | 4165 | { |
1ccbefce | 4166 | operand = *recog_data.operand_loc[i]; |
cb2afeb3 R |
4167 | |
4168 | while (GET_CODE (operand) == SUBREG) | |
ddef6bc7 | 4169 | operand = SUBREG_REG (operand); |
f8cfc6aa | 4170 | if (REG_P (operand)) |
cb2afeb3 R |
4171 | { |
4172 | if (modified[i] != RELOAD_WRITE) | |
3d17d93d AO |
4173 | /* We mark the USE with QImode so that we recognize |
4174 | it as one that can be safely deleted at the end | |
4175 | of reload. */ | |
4176 | PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand), | |
4177 | insn), QImode); | |
cb2afeb3 | 4178 | if (modified[i] != RELOAD_READ) |
c41c1387 | 4179 | emit_insn_after (gen_clobber (operand), insn); |
cb2afeb3 R |
4180 | } |
4181 | } | |
eab89b90 | 4182 | } |
a8c9daeb RK |
4183 | else if (goal_alternative_matches[i] >= 0 |
4184 | && goal_alternative_win[goal_alternative_matches[i]] | |
4185 | && modified[i] == RELOAD_READ | |
4186 | && modified[goal_alternative_matches[i]] == RELOAD_WRITE | |
112f7223 UW |
4187 | && ! no_input_reloads && ! no_output_reloads |
4188 | && optimize) | |
a8c9daeb RK |
4189 | { |
4190 | /* Similarly, make an optional reload for a pair of matching | |
4191 | objects that are in MEM or a pseudo that didn't get a hard reg. */ | |
eab89b90 | 4192 | |
1ccbefce | 4193 | rtx operand = recog_data.operand[i]; |
a8c9daeb RK |
4194 | |
4195 | while (GET_CODE (operand) == SUBREG) | |
ddef6bc7 | 4196 | operand = SUBREG_REG (operand); |
3c0cb5de | 4197 | if ((MEM_P (operand) |
f8cfc6aa | 4198 | || (REG_P (operand) |
a8c9daeb | 4199 | && REGNO (operand) >= FIRST_PSEUDO_REGISTER)) |
fba42e24 | 4200 | && (goal_alternative[goal_alternative_matches[i]] != NO_REGS)) |
a8c9daeb | 4201 | operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]] |
1ccbefce RH |
4202 | = push_reload (recog_data.operand[goal_alternative_matches[i]], |
4203 | recog_data.operand[i], | |
4204 | recog_data.operand_loc[goal_alternative_matches[i]], | |
4205 | recog_data.operand_loc[i], | |
a8c9daeb RK |
4206 | (enum reg_class) goal_alternative[goal_alternative_matches[i]], |
4207 | operand_mode[goal_alternative_matches[i]], | |
4208 | operand_mode[i], | |
4209 | 0, 1, goal_alternative_matches[i], RELOAD_OTHER); | |
4210 | } | |
05d10675 | 4211 | |
cb2afeb3 R |
4212 | /* Perform whatever substitutions on the operands we are supposed |
4213 | to make due to commutativity or replacement of registers | |
4214 | with equivalent constants or memory slots. */ | |
4215 | ||
4216 | for (i = 0; i < noperands; i++) | |
4217 | { | |
4218 | /* We only do this on the last pass through reload, because it is | |
05d10675 | 4219 | possible for some data (like reg_equiv_address) to be changed during |
425de739 | 4220 | later passes. Moreover, we lose the opportunity to get a useful |
05d10675 | 4221 | reload_{in,out}_reg when we do these replacements. */ |
cb2afeb3 R |
4222 | |
4223 | if (replace) | |
e54db24f MM |
4224 | { |
4225 | rtx substitution = substed_operand[i]; | |
4226 | ||
1ccbefce | 4227 | *recog_data.operand_loc[i] = substitution; |
e54db24f | 4228 | |
cf7c4aa6 HPN |
4229 | /* If we're replacing an operand with a LABEL_REF, we need to |
4230 | make sure that there's a REG_LABEL_OPERAND note attached to | |
e54db24f | 4231 | this instruction. */ |
cf7c4aa6 HPN |
4232 | if (GET_CODE (substitution) == LABEL_REF |
4233 | && !find_reg_note (insn, REG_LABEL_OPERAND, | |
a827d9b1 | 4234 | LABEL_REF_LABEL (substitution)) |
cf7c4aa6 HPN |
4235 | /* For a JUMP_P, if it was a branch target it must have |
4236 | already been recorded as such. */ | |
4237 | && (!JUMP_P (insn) | |
a827d9b1 | 4238 | || !label_is_jump_target_p (LABEL_REF_LABEL (substitution), |
cf7c4aa6 | 4239 | insn))) |
6bfd2688 | 4240 | { |
a827d9b1 DM |
4241 | add_reg_note (insn, REG_LABEL_OPERAND, |
4242 | LABEL_REF_LABEL (substitution)); | |
4243 | if (LABEL_P (LABEL_REF_LABEL (substitution))) | |
4244 | ++LABEL_NUSES (LABEL_REF_LABEL (substitution)); | |
6bfd2688 MS |
4245 | } |
4246 | ||
e54db24f | 4247 | } |
cb2afeb3 | 4248 | else |
1ccbefce | 4249 | retval |= (substed_operand[i] != *recog_data.operand_loc[i]); |
cb2afeb3 R |
4250 | } |
4251 | ||
eab89b90 RK |
4252 | /* If this insn pattern contains any MATCH_DUP's, make sure that |
4253 | they will be substituted if the operands they match are substituted. | |
4254 | Also do now any substitutions we already did on the operands. | |
4255 | ||
4256 | Don't do this if we aren't making replacements because we might be | |
4257 | propagating things allocated by frame pointer elimination into places | |
4258 | it doesn't expect. */ | |
4259 | ||
4260 | if (insn_code_number >= 0 && replace) | |
a995e389 | 4261 | for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--) |
eab89b90 | 4262 | { |
1ccbefce RH |
4263 | int opno = recog_data.dup_num[i]; |
4264 | *recog_data.dup_loc[i] = *recog_data.operand_loc[opno]; | |
6cabe79e | 4265 | dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]); |
eab89b90 RK |
4266 | } |
4267 | ||
4268 | #if 0 | |
4269 | /* This loses because reloading of prior insns can invalidate the equivalence | |
4270 | (or at least find_equiv_reg isn't smart enough to find it any more), | |
4271 | causing this insn to need more reload regs than it needed before. | |
4272 | It may be too late to make the reload regs available. | |
4273 | Now this optimization is done safely in choose_reload_regs. */ | |
4274 | ||
4275 | /* For each reload of a reg into some other class of reg, | |
4276 | search for an existing equivalent reg (same value now) in the right class. | |
4277 | We can use it as long as we don't need to change its contents. */ | |
4278 | for (i = 0; i < n_reloads; i++) | |
eceef4c9 BS |
4279 | if (rld[i].reg_rtx == 0 |
4280 | && rld[i].in != 0 | |
f8cfc6aa | 4281 | && REG_P (rld[i].in) |
eceef4c9 | 4282 | && rld[i].out == 0) |
eab89b90 | 4283 | { |
eceef4c9 | 4284 | rld[i].reg_rtx |
48c54229 | 4285 | = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1, |
eceef4c9 | 4286 | static_reload_reg_p, 0, rld[i].inmode); |
eab89b90 RK |
4287 | /* Prevent generation of insn to load the value |
4288 | because the one we found already has the value. */ | |
eceef4c9 BS |
4289 | if (rld[i].reg_rtx) |
4290 | rld[i].in = rld[i].reg_rtx; | |
eab89b90 RK |
4291 | } |
4292 | #endif | |
4293 | ||
71156bcc JH |
4294 | /* If we detected error and replaced asm instruction by USE, forget about the |
4295 | reloads. */ | |
4296 | if (GET_CODE (PATTERN (insn)) == USE | |
481683e1 | 4297 | && CONST_INT_P (XEXP (PATTERN (insn), 0))) |
71156bcc JH |
4298 | n_reloads = 0; |
4299 | ||
a8c9daeb RK |
4300 | /* Perhaps an output reload can be combined with another |
4301 | to reduce needs by one. */ | |
4302 | if (!goal_earlyclobber) | |
4303 | combine_reloads (); | |
4304 | ||
4305 | /* If we have a pair of reloads for parts of an address, they are reloading | |
4306 | the same object, the operands themselves were not reloaded, and they | |
4307 | are for two operands that are supposed to match, merge the reloads and | |
0f41302f | 4308 | change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */ |
a8c9daeb RK |
4309 | |
4310 | for (i = 0; i < n_reloads; i++) | |
4311 | { | |
4312 | int k; | |
4313 | ||
4314 | for (j = i + 1; j < n_reloads; j++) | |
eceef4c9 BS |
4315 | if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS |
4316 | || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS | |
4317 | || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS | |
4318 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4319 | && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS | |
4320 | || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS | |
4321 | || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS | |
4322 | || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4323 | && rtx_equal_p (rld[i].in, rld[j].in) | |
4324 | && (operand_reloadnum[rld[i].opnum] < 0 | |
4325 | || rld[operand_reloadnum[rld[i].opnum]].optional) | |
4326 | && (operand_reloadnum[rld[j].opnum] < 0 | |
4327 | || rld[operand_reloadnum[rld[j].opnum]].optional) | |
4328 | && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum | |
4329 | || (goal_alternative_matches[rld[j].opnum] | |
4330 | == rld[i].opnum))) | |
a8c9daeb RK |
4331 | { |
4332 | for (k = 0; k < n_replacements; k++) | |
4333 | if (replacements[k].what == j) | |
4334 | replacements[k].what = i; | |
4335 | ||
eceef4c9 BS |
4336 | if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS |
4337 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4338 | rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR; | |
47c8cf91 | 4339 | else |
eceef4c9 BS |
4340 | rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS; |
4341 | rld[j].in = 0; | |
a8c9daeb RK |
4342 | } |
4343 | } | |
4344 | ||
05d10675 | 4345 | /* Scan all the reloads and update their type. |
a8c9daeb RK |
4346 | If a reload is for the address of an operand and we didn't reload |
4347 | that operand, change the type. Similarly, change the operand number | |
4348 | of a reload when two operands match. If a reload is optional, treat it | |
4349 | as though the operand isn't reloaded. | |
4350 | ||
4351 | ??? This latter case is somewhat odd because if we do the optional | |
4352 | reload, it means the object is hanging around. Thus we need only | |
4353 | do the address reload if the optional reload was NOT done. | |
4354 | ||
4355 | Change secondary reloads to be the address type of their operand, not | |
4356 | the normal type. | |
4357 | ||
4358 | If an operand's reload is now RELOAD_OTHER, change any | |
4359 | RELOAD_FOR_INPUT_ADDRESS reloads of that operand to | |
4360 | RELOAD_FOR_OTHER_ADDRESS. */ | |
4361 | ||
4362 | for (i = 0; i < n_reloads; i++) | |
4363 | { | |
eceef4c9 | 4364 | if (rld[i].secondary_p |
38323cc3 RH |
4365 | && rld[i].when_needed == operand_type[rld[i].opnum]) |
4366 | rld[i].when_needed = address_type[rld[i].opnum]; | |
eceef4c9 BS |
4367 | |
4368 | if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS | |
4369 | || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS | |
4370 | || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS | |
4371 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4372 | && (operand_reloadnum[rld[i].opnum] < 0 | |
4373 | || rld[operand_reloadnum[rld[i].opnum]].optional)) | |
f98bb7d3 RK |
4374 | { |
4375 | /* If we have a secondary reload to go along with this reload, | |
0f41302f | 4376 | change its type to RELOAD_FOR_OPADDR_ADDR. */ |
f98bb7d3 | 4377 | |
eceef4c9 BS |
4378 | if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS |
4379 | || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS) | |
4380 | && rld[i].secondary_in_reload != -1) | |
f98bb7d3 | 4381 | { |
eceef4c9 | 4382 | int secondary_in_reload = rld[i].secondary_in_reload; |
f98bb7d3 | 4383 | |
4381f7c2 | 4384 | rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR; |
f98bb7d3 | 4385 | |
0f41302f | 4386 | /* If there's a tertiary reload we have to change it also. */ |
f98bb7d3 | 4387 | if (secondary_in_reload > 0 |
eceef4c9 BS |
4388 | && rld[secondary_in_reload].secondary_in_reload != -1) |
4389 | rld[rld[secondary_in_reload].secondary_in_reload].when_needed | |
38323cc3 | 4390 | = RELOAD_FOR_OPADDR_ADDR; |
f98bb7d3 RK |
4391 | } |
4392 | ||
eceef4c9 BS |
4393 | if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS |
4394 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4395 | && rld[i].secondary_out_reload != -1) | |
f98bb7d3 | 4396 | { |
eceef4c9 | 4397 | int secondary_out_reload = rld[i].secondary_out_reload; |
f98bb7d3 | 4398 | |
4381f7c2 | 4399 | rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR; |
f98bb7d3 | 4400 | |
0f41302f | 4401 | /* If there's a tertiary reload we have to change it also. */ |
f98bb7d3 | 4402 | if (secondary_out_reload |
eceef4c9 BS |
4403 | && rld[secondary_out_reload].secondary_out_reload != -1) |
4404 | rld[rld[secondary_out_reload].secondary_out_reload].when_needed | |
38323cc3 | 4405 | = RELOAD_FOR_OPADDR_ADDR; |
f98bb7d3 | 4406 | } |
e5e809f4 | 4407 | |
eceef4c9 BS |
4408 | if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS |
4409 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS) | |
4410 | rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR; | |
cb2afeb3 | 4411 | else |
eceef4c9 | 4412 | rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS; |
f98bb7d3 | 4413 | } |
a8c9daeb | 4414 | |
eceef4c9 BS |
4415 | if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS |
4416 | || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS) | |
4417 | && operand_reloadnum[rld[i].opnum] >= 0 | |
4418 | && (rld[operand_reloadnum[rld[i].opnum]].when_needed | |
a8c9daeb | 4419 | == RELOAD_OTHER)) |
eceef4c9 | 4420 | rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS; |
a8c9daeb | 4421 | |
eceef4c9 BS |
4422 | if (goal_alternative_matches[rld[i].opnum] >= 0) |
4423 | rld[i].opnum = goal_alternative_matches[rld[i].opnum]; | |
a8c9daeb RK |
4424 | } |
4425 | ||
a94ce333 JW |
4426 | /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads. |
4427 | If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR | |
4428 | reloads to RELOAD_FOR_OPERAND_ADDRESS reloads. | |
4429 | ||
4430 | choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never | |
4431 | conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a | |
4432 | single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads. | |
4433 | However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload, | |
4434 | then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all | |
4435 | RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it. | |
4436 | This is complicated by the fact that a single operand can have more | |
4437 | than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix | |
4438 | choose_reload_regs without affecting code quality, and cases that | |
4439 | actually fail are extremely rare, so it turns out to be better to fix | |
4440 | the problem here by not generating cases that choose_reload_regs will | |
4441 | fail for. */ | |
d3adbeea | 4442 | /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS / |
826e3854 R |
4443 | RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for |
4444 | a single operand. | |
4445 | We can reduce the register pressure by exploiting that a | |
4446 | RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads | |
c10638c9 R |
4447 | does not conflict with any of them, if it is only used for the first of |
4448 | the RELOAD_FOR_X_ADDRESS reloads. */ | |
a94ce333 | 4449 | { |
826e3854 R |
4450 | int first_op_addr_num = -2; |
4451 | int first_inpaddr_num[MAX_RECOG_OPERANDS]; | |
4452 | int first_outpaddr_num[MAX_RECOG_OPERANDS]; | |
4381f7c2 | 4453 | int need_change = 0; |
826e3854 R |
4454 | /* We use last_op_addr_reload and the contents of the above arrays |
4455 | first as flags - -2 means no instance encountered, -1 means exactly | |
4456 | one instance encountered. | |
4457 | If more than one instance has been encountered, we store the reload | |
4458 | number of the first reload of the kind in question; reload numbers | |
4459 | are known to be non-negative. */ | |
4460 | for (i = 0; i < noperands; i++) | |
4461 | first_inpaddr_num[i] = first_outpaddr_num[i] = -2; | |
4462 | for (i = n_reloads - 1; i >= 0; i--) | |
4463 | { | |
eceef4c9 | 4464 | switch (rld[i].when_needed) |
826e3854 R |
4465 | { |
4466 | case RELOAD_FOR_OPERAND_ADDRESS: | |
c10638c9 | 4467 | if (++first_op_addr_num >= 0) |
826e3854 | 4468 | { |
c10638c9 | 4469 | first_op_addr_num = i; |
826e3854 R |
4470 | need_change = 1; |
4471 | } | |
4472 | break; | |
4473 | case RELOAD_FOR_INPUT_ADDRESS: | |
eceef4c9 | 4474 | if (++first_inpaddr_num[rld[i].opnum] >= 0) |
826e3854 | 4475 | { |
eceef4c9 | 4476 | first_inpaddr_num[rld[i].opnum] = i; |
826e3854 R |
4477 | need_change = 1; |
4478 | } | |
4479 | break; | |
4480 | case RELOAD_FOR_OUTPUT_ADDRESS: | |
eceef4c9 | 4481 | if (++first_outpaddr_num[rld[i].opnum] >= 0) |
826e3854 | 4482 | { |
eceef4c9 | 4483 | first_outpaddr_num[rld[i].opnum] = i; |
826e3854 R |
4484 | need_change = 1; |
4485 | } | |
4486 | break; | |
4487 | default: | |
4488 | break; | |
4489 | } | |
4490 | } | |
a94ce333 | 4491 | |
826e3854 R |
4492 | if (need_change) |
4493 | { | |
4494 | for (i = 0; i < n_reloads; i++) | |
4495 | { | |
c8d8ed65 RK |
4496 | int first_num; |
4497 | enum reload_type type; | |
826e3854 | 4498 | |
eceef4c9 | 4499 | switch (rld[i].when_needed) |
826e3854 R |
4500 | { |
4501 | case RELOAD_FOR_OPADDR_ADDR: | |
4502 | first_num = first_op_addr_num; | |
4503 | type = RELOAD_FOR_OPERAND_ADDRESS; | |
4504 | break; | |
4505 | case RELOAD_FOR_INPADDR_ADDRESS: | |
eceef4c9 | 4506 | first_num = first_inpaddr_num[rld[i].opnum]; |
826e3854 R |
4507 | type = RELOAD_FOR_INPUT_ADDRESS; |
4508 | break; | |
4509 | case RELOAD_FOR_OUTADDR_ADDRESS: | |
eceef4c9 | 4510 | first_num = first_outpaddr_num[rld[i].opnum]; |
826e3854 R |
4511 | type = RELOAD_FOR_OUTPUT_ADDRESS; |
4512 | break; | |
4513 | default: | |
4514 | continue; | |
4515 | } | |
c10638c9 R |
4516 | if (first_num < 0) |
4517 | continue; | |
4518 | else if (i > first_num) | |
eceef4c9 | 4519 | rld[i].when_needed = type; |
c10638c9 R |
4520 | else |
4521 | { | |
4522 | /* Check if the only TYPE reload that uses reload I is | |
4523 | reload FIRST_NUM. */ | |
4524 | for (j = n_reloads - 1; j > first_num; j--) | |
4525 | { | |
eceef4c9 BS |
4526 | if (rld[j].when_needed == type |
4527 | && (rld[i].secondary_p | |
4528 | ? rld[j].secondary_in_reload == i | |
4529 | : reg_mentioned_p (rld[i].in, rld[j].in))) | |
c10638c9 | 4530 | { |
eceef4c9 | 4531 | rld[i].when_needed = type; |
c10638c9 R |
4532 | break; |
4533 | } | |
4534 | } | |
4535 | } | |
826e3854 R |
4536 | } |
4537 | } | |
a94ce333 JW |
4538 | } |
4539 | ||
a8c9daeb RK |
4540 | /* See if we have any reloads that are now allowed to be merged |
4541 | because we've changed when the reload is needed to | |
4542 | RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only | |
4543 | check for the most common cases. */ | |
4544 | ||
4545 | for (i = 0; i < n_reloads; i++) | |
eceef4c9 BS |
4546 | if (rld[i].in != 0 && rld[i].out == 0 |
4547 | && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS | |
4548 | || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR | |
4549 | || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS)) | |
a8c9daeb | 4550 | for (j = 0; j < n_reloads; j++) |
eceef4c9 BS |
4551 | if (i != j && rld[j].in != 0 && rld[j].out == 0 |
4552 | && rld[j].when_needed == rld[i].when_needed | |
4553 | && MATCHES (rld[i].in, rld[j].in) | |
48c54229 | 4554 | && rld[i].rclass == rld[j].rclass |
eceef4c9 BS |
4555 | && !rld[i].nocombine && !rld[j].nocombine |
4556 | && rld[i].reg_rtx == rld[j].reg_rtx) | |
a8c9daeb | 4557 | { |
eceef4c9 | 4558 | rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum); |
a8c9daeb | 4559 | transfer_replacements (i, j); |
eceef4c9 | 4560 | rld[j].in = 0; |
a8c9daeb RK |
4561 | } |
4562 | ||
14a774a9 RK |
4563 | #ifdef HAVE_cc0 |
4564 | /* If we made any reloads for addresses, see if they violate a | |
4565 | "no input reloads" requirement for this insn. But loads that we | |
4566 | do after the insn (such as for output addresses) are fine. */ | |
4567 | if (no_input_reloads) | |
4568 | for (i = 0; i < n_reloads; i++) | |
41374e13 NS |
4569 | gcc_assert (rld[i].in == 0 |
4570 | || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS | |
4571 | || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS); | |
14a774a9 RK |
4572 | #endif |
4573 | ||
67e61fe7 BS |
4574 | /* Compute reload_mode and reload_nregs. */ |
4575 | for (i = 0; i < n_reloads; i++) | |
4576 | { | |
4577 | rld[i].mode | |
4578 | = (rld[i].inmode == VOIDmode | |
4579 | || (GET_MODE_SIZE (rld[i].outmode) | |
4580 | > GET_MODE_SIZE (rld[i].inmode))) | |
4581 | ? rld[i].outmode : rld[i].inmode; | |
4582 | ||
a8c44c52 | 4583 | rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode]; |
67e61fe7 BS |
4584 | } |
4585 | ||
02a10130 RH |
4586 | /* Special case a simple move with an input reload and a |
4587 | destination of a hard reg, if the hard reg is ok, use it. */ | |
4588 | for (i = 0; i < n_reloads; i++) | |
4589 | if (rld[i].when_needed == RELOAD_FOR_INPUT | |
4590 | && GET_CODE (PATTERN (insn)) == SET | |
f8cfc6aa | 4591 | && REG_P (SET_DEST (PATTERN (insn))) |
b3519e7c L |
4592 | && (SET_SRC (PATTERN (insn)) == rld[i].in |
4593 | || SET_SRC (PATTERN (insn)) == rld[i].in_reg) | |
8c74fb06 | 4594 | && !elimination_target_reg_p (SET_DEST (PATTERN (insn)))) |
02a10130 | 4595 | { |
0c20a65f | 4596 | rtx dest = SET_DEST (PATTERN (insn)); |
02a10130 RH |
4597 | unsigned int regno = REGNO (dest); |
4598 | ||
0c20a65f | 4599 | if (regno < FIRST_PSEUDO_REGISTER |
48c54229 | 4600 | && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno) |
0c20a65f | 4601 | && HARD_REGNO_MODE_OK (regno, rld[i].mode)) |
57458e8a | 4602 | { |
66fd46b6 | 4603 | int nr = hard_regno_nregs[regno][rld[i].mode]; |
57458e8a DD |
4604 | int ok = 1, nri; |
4605 | ||
4606 | for (nri = 1; nri < nr; nri ++) | |
48c54229 | 4607 | if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri)) |
c7b3b99f PCC |
4608 | { |
4609 | ok = 0; | |
4610 | break; | |
4611 | } | |
57458e8a DD |
4612 | |
4613 | if (ok) | |
4614 | rld[i].reg_rtx = dest; | |
4615 | } | |
02a10130 RH |
4616 | } |
4617 | ||
cb2afeb3 | 4618 | return retval; |
eab89b90 RK |
4619 | } |
4620 | ||
1f7f6676 RS |
4621 | /* Return true if alternative number ALTNUM in constraint-string |
4622 | CONSTRAINT is guaranteed to accept a reloaded constant-pool reference. | |
4623 | MEM gives the reference if it didn't need any reloads, otherwise it | |
4624 | is null. */ | |
eab89b90 | 4625 | |
1f7f6676 | 4626 | static bool |
9a476c9c ILT |
4627 | alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED, |
4628 | const char *constraint, int altnum) | |
eab89b90 | 4629 | { |
b3694847 | 4630 | int c; |
1f7f6676 | 4631 | |
eab89b90 RK |
4632 | /* Skip alternatives before the one requested. */ |
4633 | while (altnum > 0) | |
4634 | { | |
e84a58ff EB |
4635 | while (*constraint++ != ',') |
4636 | ; | |
eab89b90 RK |
4637 | altnum--; |
4638 | } | |
a4edaf83 | 4639 | /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'. |
1f7f6676 RS |
4640 | If one of them is present, this alternative accepts the result of |
4641 | passing a constant-pool reference through find_reloads_toplev. | |
4642 | ||
4643 | The same is true of extra memory constraints if the address | |
4644 | was reloaded into a register. However, the target may elect | |
4645 | to disallow the original constant address, forcing it to be | |
4646 | reloaded into a register instead. */ | |
97488870 R |
4647 | for (; (c = *constraint) && c != ',' && c != '#'; |
4648 | constraint += CONSTRAINT_LEN (c, constraint)) | |
1f7f6676 | 4649 | { |
777e635f RS |
4650 | enum constraint_num cn = lookup_constraint (constraint); |
4651 | if (insn_extra_memory_constraint (cn) | |
4652 | && (mem == NULL || constraint_satisfied_p (mem, cn))) | |
1f7f6676 | 4653 | return true; |
1f7f6676 RS |
4654 | } |
4655 | return false; | |
eab89b90 RK |
4656 | } |
4657 | \f | |
/* Scan X for memory references and scan the addresses for reloading.
   Also checks for references to "constant" regs that we want to eliminate
   and replaces them with the values they stand for.
   We may alter X destructively if it contains a reference to such.
   If X is just a constant reg, we return the equivalent value
   instead of X.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.

   OPNUM and TYPE identify the purpose of the reload.

   IS_SET_DEST is true if X is the destination of a SET, which is not
   appropriate to be replaced by a constant.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   ADDRESS_RELOADED.  If nonzero, is a pointer to where we put the
   result of find_reloads_address.  */

static rtx
find_reloads_toplev (rtx x, int opnum, enum reload_type type,
		     int ind_levels, int is_set_dest, rtx_insn *insn,
		     int *address_reloaded)
{
  RTX_CODE code = GET_CODE (x);

  const char *fmt = GET_RTX_FORMAT (code);
  int i;
  int copied;

  if (code == REG)
    {
      /* This code is duplicated for speed in find_reloads.  */
      int regno = REGNO (x);
      /* A pseudo with a known constant equivalence is replaced by the
	 constant itself, except when it is being written to.  */
      if (reg_equiv_constant (regno) != 0 && !is_set_dest)
	x = reg_equiv_constant (regno);
#if 0
      /* This creates (subreg (mem...)) which would cause an unnecessary
	 reload of the mem.  */
      else if (reg_equiv_mem (regno) != 0)
	x = reg_equiv_mem (regno);
#endif
      else if (reg_equiv_memory_loc (regno)
	       && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
	{
	  rtx mem = make_memloc (x, regno);
	  if (reg_equiv_address (regno)
	      || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
	    {
	      /* If this is not a toplevel operand, find_reloads doesn't see
		 this substitution.  We have to emit a USE of the pseudo so
		 that delete_output_reload can see it.  */
	      if (replace_reloads && recog_data.operand[opnum] != x)
		/* We mark the USE with QImode so that we recognize it
		   as one that can be safely deleted at the end of
		   reload.  */
		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
			  QImode);
	      x = mem;
	      i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
					&XEXP (x, 0),
					opnum, type, ind_levels, insn);
	      /* Record the alternate memory location if the address was
		 changed, so later passes see the substituted form.  */
	      if (!rtx_equal_p (x, mem))
		push_reg_equiv_alt_mem (regno, x);
	      if (address_reloaded)
		*address_reloaded = i;
	    }
	}
      return x;
    }
  if (code == MEM)
    {
      rtx tem = x;

      i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
				opnum, type, ind_levels, insn);
      if (address_reloaded)
	*address_reloaded = i;

      return tem;
    }

  if (code == SUBREG && REG_P (SUBREG_REG (x)))
    {
      /* Check for SUBREG containing a REG that's equivalent to a
	 constant.  If the constant has a known value, truncate it
	 right now.  Similarly if we are extracting a single-word of a
	 multi-word constant.  If the constant is symbolic, allow it
	 to be substituted normally.  push_reload will strip the
	 subreg later.  The constant must not be VOIDmode, because we
	 will lose the mode of the register (this should never happen
	 because one of the cases above should handle it).  */

      int regno = REGNO (SUBREG_REG (x));
      rtx tem;

      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_renumber[regno] < 0
	  && reg_equiv_constant (regno) != 0)
	{
	  tem =
	    simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
				 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
	  gcc_assert (tem);
	  /* A constant the target cannot accept directly is spilled to
	     the constant pool, and its address is then reloaded.  */
	  if (CONSTANT_P (tem)
	      && !targetm.legitimate_constant_p (GET_MODE (x), tem))
	    {
	      tem = force_const_mem (GET_MODE (x), tem);
	      i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum, type,
					ind_levels, insn);
	      if (address_reloaded)
		*address_reloaded = i;
	    }
	  return tem;
	}

      /* If the subreg contains a reg that will be converted to a mem,
	 attempt to convert the whole subreg to a (narrower or wider)
	 memory reference instead.  If this succeeds, we're done --
	 otherwise fall through to check whether the inner reg still
	 needs address reloads anyway.  */

      if (regno >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_memory_loc (regno) != 0)
	{
	  tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
					     insn, address_reloaded);
	  if (tem)
	    return tem;
	}
    }

  /* Recurse into every rtx subexpression of X, substituting as above.  */
  for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
					      ind_levels, is_set_dest, insn,
					      address_reloaded);
	  /* If we have replaced a reg with its equivalent memory loc -
	     that can still be handled here e.g. if it's in a paradoxical
	     subreg - we must make the change in a copy, rather than using
	     a destructive change.  This way, find_reloads can still elect
	     not to do the change.  */
	  if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
	    {
	      x = shallow_copy_rtx (x);
	      copied = 1;
	    }
	  XEXP (x, i) = new_part;
	}
    }
  return x;
}
4815 | ||
dbf85761 RS |
4816 | /* Return a mem ref for the memory equivalent of reg REGNO. |
4817 | This mem ref is not shared with anything. */ | |
4818 | ||
eab89b90 | 4819 | static rtx |
0c20a65f | 4820 | make_memloc (rtx ad, int regno) |
eab89b90 | 4821 | { |
4ffeab02 JW |
4822 | /* We must rerun eliminate_regs, in case the elimination |
4823 | offsets have changed. */ | |
cb2afeb3 | 4824 | rtx tem |
f2034d06 | 4825 | = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX), |
bbbbb16a | 4826 | 0); |
eab89b90 RK |
4827 | |
4828 | /* If TEM might contain a pseudo, we must copy it to avoid | |
4829 | modifying it when we do the substitution for the reload. */ | |
e38fe8e0 | 4830 | if (rtx_varies_p (tem, 0)) |
eab89b90 RK |
4831 | tem = copy_rtx (tem); |
4832 | ||
f2034d06 | 4833 | tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem); |
cf728d61 HPN |
4834 | tem = adjust_address_nv (tem, GET_MODE (ad), 0); |
4835 | ||
4836 | /* Copy the result if it's still the same as the equivalence, to avoid | |
4837 | modifying it when we do the substitution for the reload. */ | |
f2034d06 | 4838 | if (tem == reg_equiv_memory_loc (regno)) |
cf728d61 HPN |
4839 | tem = copy_rtx (tem); |
4840 | return tem; | |
eab89b90 RK |
4841 | } |
4842 | ||
acf9fa5f | 4843 | /* Returns true if AD could be turned into a valid memory reference |
09e881c9 BE |
4844 | to mode MODE in address space AS by reloading the part pointed to |
4845 | by PART into a register. */ | |
acf9fa5f UW |
4846 | |
4847 | static int | |
ef4bddc2 | 4848 | maybe_memory_address_addr_space_p (machine_mode mode, rtx ad, |
09e881c9 | 4849 | addr_space_t as, rtx *part) |
acf9fa5f UW |
4850 | { |
4851 | int retv; | |
4852 | rtx tem = *part; | |
4853 | rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ()); | |
4854 | ||
4855 | *part = reg; | |
09e881c9 | 4856 | retv = memory_address_addr_space_p (mode, ad, as); |
acf9fa5f UW |
4857 | *part = tem; |
4858 | ||
4859 | return retv; | |
4860 | } | |
4861 | ||
eab89b90 RK |
/* Record all reloads needed for handling memory address AD
   which appears in *LOC in a memory reference to mode MODE
   which itself is found in location *MEMREFLOC.
   Note that we take shortcuts assuming that no multi-reg machine mode
   occurs as part of an address.

   OPNUM and TYPE specify the purpose of this reload.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine if we may generate output reloads, and where to put USEs
   for pseudos that we have to replace with stack slots.

   Value is one if this address is reloaded or replaced as a whole; it is
   zero if the top level of this address was not reloaded or replaced, and
   it is -1 if it may or may not have been reloaded or replaced.

   Note that there is no verification that the address will be valid after
   this routine does its work.  Instead, we rely on the fact that the address
   was valid when reload started.  So we need only undo things that reload
   could have broken.  These are wrong register types, pseudos not allocated
   to a hard register, and frame pointer elimination.  */

static int
find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
		      rtx *loc, int opnum, enum reload_type type,
		      int ind_levels, rtx_insn *insn)
{
  addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
			     : ADDR_SPACE_GENERIC;
  int regno;
  int removed_and = 0;	/* Set when we strip an outer AND below; affects
			   the return value and where *LOC points.  */
  int op_index;
  rtx tem;

  /* If the address is a register, see if it is a legitimate address and
     reload if not.  We first handle the cases where we need not reload
     or where we must reload in a non-standard way.  */

  if (REG_P (ad))
    {
      regno = REGNO (ad);

      /* A pseudo with a known constant equivalent: load that constant
	 into a base register instead of reloading the pseudo itself.  */
      if (reg_equiv_constant (regno) != 0)
	{
	  find_reloads_address_part (reg_equiv_constant (regno), loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
	  return 1;
	}

      tem = reg_equiv_memory_loc (regno);
      if (tem != 0)
	{
	  if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
	    {
	      /* Rebuild the equivalent MEM with up-to-date elimination
		 offsets, and legitimize its address if necessary.  */
	      tem = make_memloc (ad, regno);
	      if (! strict_memory_address_addr_space_p (GET_MODE (tem),
							XEXP (tem, 0),
							MEM_ADDR_SPACE (tem)))
		{
		  rtx orig = tem;

		  find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
					&XEXP (tem, 0), opnum,
					ADDR_TYPE (type), ind_levels, insn);
		  if (!rtx_equal_p (tem, orig))
		    push_reg_equiv_alt_mem (regno, tem);
		}
	      /* We can avoid a reload if the register's equivalent memory
		 expression is valid as an indirect memory address.
		 But not all addresses are valid in a mem used as an indirect
		 address: only reg or reg+constant.  */

	      if (ind_levels > 0
		  && strict_memory_address_addr_space_p (mode, tem, as)
		  && (REG_P (XEXP (tem, 0))
		      || (GET_CODE (XEXP (tem, 0)) == PLUS
			  && REG_P (XEXP (XEXP (tem, 0), 0))
			  && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
		{
		  /* TEM is not the same as what we'll be replacing the
		     pseudo with after reload, put a USE in front of INSN
		     in the final reload pass.  */
		  if (replace_reloads
		      && num_not_at_initial_offset
		      && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
		    {
		      *loc = tem;
		      /* We mark the USE with QImode so that we
			 recognize it as one that can be safely
			 deleted at the end of reload.  */
		      PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
						  insn), QImode);

		      /* This doesn't really count as replacing the address
			 as a whole, since it is still a memory access.  */
		    }
		  return 0;
		}
	      ad = tem;
	    }
	}

      /* The only remaining case where we can avoid a reload is if this is a
	 hard register that is valid as a base register and which is not the
	 subject of a CLOBBER in this insn.  */

      else if (regno < FIRST_PSEUDO_REGISTER
	       && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
	       && ! regno_clobbered_p (regno, this_insn, mode, 0))
	return 0;

      /* If we do not have one of the cases above, we must do the reload.  */
      push_reload (ad, NULL_RTX, loc, (rtx*) 0,
		   base_reg_class (mode, as, MEM, SCRATCH),
		   GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
      return 1;
    }

  if (strict_memory_address_addr_space_p (mode, ad, as))
    {
      /* The address appears valid, so reloads are not needed.
	 But the address may contain an eliminable register.
	 This can happen because a machine with indirect addressing
	 may consider a pseudo register by itself a valid address even when
	 it has failed to get a hard reg.
	 So do a tree-walk to find and eliminate all such regs.  */

      /* But first quickly dispose of a common case.  */
      if (GET_CODE (ad) == PLUS
	  && CONST_INT_P (XEXP (ad, 1))
	  && REG_P (XEXP (ad, 0))
	  && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
	return 0;

      subst_reg_equivs_changed = 0;
      *loc = subst_reg_equivs (ad, insn);

      if (! subst_reg_equivs_changed)
	return 0;

      /* Check result for validity after substitution.  */
      if (strict_memory_address_addr_space_p (mode, ad, as))
	return 0;
    }

  /* Give the target a chance to legitimize the whole address itself;
     if it takes the "win" exit we return -1 (may or may not have been
     replaced as a whole).  Only done for the generic address space.  */
#ifdef LEGITIMIZE_RELOAD_ADDRESS
  do
    {
      if (memrefloc && ADDR_SPACE_GENERIC_P (as))
	{
	  LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
				     ind_levels, win);
	}
      break;
    win:
      *memrefloc = copy_rtx (*memrefloc);
      XEXP (*memrefloc, 0) = ad;
      move_replacements (&ad, &XEXP (*memrefloc, 0));
      return -1;
    }
  while (0);
#endif

  /* The address is not valid.  We have to figure out why.  First see if
     we have an outer AND and remove it if so.  Then analyze what's inside.  */

  if (GET_CODE (ad) == AND)
    {
      removed_and = 1;
      loc = &XEXP (ad, 0);
      ad = *loc;
    }

  /* One possibility for why the address is invalid is that it is itself
     a MEM.  This can happen when the frame pointer is being eliminated, a
     pseudo is not allocated to a hard register, and the offset between the
     frame and stack pointers is not its initial value.  In that case the
     pseudo will have been replaced by a MEM referring to the
     stack pointer.  */
  if (MEM_P (ad))
    {
      /* First ensure that the address in this MEM is valid.  Then, unless
	 indirect addresses are valid, reload the MEM into a register.  */
      tem = ad;
      find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
			    opnum, ADDR_TYPE (type),
			    ind_levels == 0 ? 0 : ind_levels - 1, insn);

      /* If tem was changed, then we must create a new memory reference to
	 hold it and store it back into memrefloc.  */
      if (tem != ad && memrefloc)
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  copy_replacements (tem, XEXP (*memrefloc, 0));
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      /* Check similar cases as for indirect addresses as above except
	 that we can allow pseudos and a MEM since they should have been
	 taken care of above.  */

      if (ind_levels == 0
	  || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
	  || MEM_P (XEXP (tem, 0))
	  || ! (REG_P (XEXP (tem, 0))
		|| (GET_CODE (XEXP (tem, 0)) == PLUS
		    && REG_P (XEXP (XEXP (tem, 0), 0))
		    && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
	{
	  /* Must use TEM here, not AD, since it is the one that will
	     have any subexpressions reloaded, if needed.  */
	  push_reload (tem, NULL_RTX, loc, (rtx*) 0,
		       base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
		       VOIDmode, 0,
		       0, opnum, type);
	  return ! removed_and;
	}
      else
	return 0;
    }

  /* If we have address of a stack slot but it's not valid because the
     displacement is too large, compute the sum in a register.
     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SH) we can also get too large displacements from
     big-endian corrections.  */
  else if (GET_CODE (ad) == PLUS
	   && REG_P (XEXP (ad, 0))
	   && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
	   && CONST_INT_P (XEXP (ad, 1))
	   && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
				    CONST_INT)
	       /* Similarly, if we were to reload the base register and the
		  mem+offset address is still invalid, then we want to reload
		  the whole address, not just the base register.  */
	       || ! maybe_memory_address_addr_space_p
		     (mode, ad, as, &(XEXP (ad, 0)))))

    {
      /* Unshare the MEM rtx so we can safely alter it.  */
      if (memrefloc)
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      if (double_reg_address_ok
	  && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
				  PLUS, CONST_INT))
	{
	  /* Unshare the sum as well.  */
	  *loc = ad = copy_rtx (ad);

	  /* Reload the displacement into an index reg.
	     We assume the frame pointer or arg pointer is a base reg.  */
	  find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
				     INDEX_REG_CLASS, GET_MODE (ad), opnum,
				     type, ind_levels);
	  return 0;
	}
      else
	{
	  /* If the sum of two regs is not necessarily valid,
	     reload the sum into a base reg.
	     That will at least work.  */
	  find_reloads_address_part (ad, loc,
				     base_reg_class (mode, as, MEM, SCRATCH),
				     GET_MODE (ad), opnum, type, ind_levels);
	}
      return ! removed_and;
    }

  /* If we have an indexed stack slot, there are three possible reasons why
     it might be invalid: The index might need to be reloaded, the address
     might have been made by frame pointer elimination and hence have a
     constant out of range, or both reasons might apply.

     We can easily check for an index needing reload, but even if that is the
     case, we might also have an invalid constant.  To avoid making the
     conservative assumption and requiring two reloads, we see if this address
     is valid when not interpreted strictly.  If it is, the only problem is
     that the index needs a reload and find_reloads_address_1 will take care
     of it.

     Handle all base registers here, not just fp/ap/sp, because on some
     targets (namely SPARC) we can also get invalid addresses from preventive
     subreg big-endian corrections made by find_reloads_toplev.  We
     can also get expressions involving LO_SUM (rather than PLUS) from
     find_reloads_subreg_address.

     If we decide to do something, it must be that `double_reg_address_ok'
     is true.  We generate a reload of the base register + constant and
     rework the sum so that the reload register will be added to the index.
     This is safe because we know the address isn't shared.

     We check for the base register as both the first and second operand of
     the innermost PLUS and/or LO_SUM.  */

  for (op_index = 0; op_index < 2; ++op_index)
    {
      rtx operand, addend;
      enum rtx_code inner_code;

      if (GET_CODE (ad) != PLUS)
	continue;

      /* Look for (plus (plus/lo_sum A B) const_int).  */
      inner_code = GET_CODE (XEXP (ad, 0));
      if (!(GET_CODE (ad) == PLUS
	    && CONST_INT_P (XEXP (ad, 1))
	    && (inner_code == PLUS || inner_code == LO_SUM)))
	continue;

      operand = XEXP (XEXP (ad, 0), op_index);
      if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
	continue;

      addend = XEXP (XEXP (ad, 0), 1 - op_index);

      if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
				GET_CODE (addend))
	   || operand == frame_pointer_rtx
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	   || operand == hard_frame_pointer_rtx
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	   || operand == arg_pointer_rtx
#endif
	   || operand == stack_pointer_rtx)
	  && ! maybe_memory_address_addr_space_p
		(mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
	{
	  rtx offset_reg;
	  enum reg_class cls;

	  /* Fold the outer constant into the base-register operand;
	     the combined value gets reloaded into a base register.  */
	  offset_reg = plus_constant (GET_MODE (ad), operand,
				      INTVAL (XEXP (ad, 1)));

	  /* Form the adjusted address.  */
	  if (GET_CODE (XEXP (ad, 0)) == PLUS)
	    ad = gen_rtx_PLUS (GET_MODE (ad),
			       op_index == 0 ? offset_reg : addend,
			       op_index == 0 ? addend : offset_reg);
	  else
	    ad = gen_rtx_LO_SUM (GET_MODE (ad),
				 op_index == 0 ? offset_reg : addend,
				 op_index == 0 ? addend : offset_reg);
	  *loc = ad;

	  cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
	  find_reloads_address_part (XEXP (ad, op_index),
				     &XEXP (ad, op_index), cls,
				     GET_MODE (ad), opnum, type, ind_levels);
	  find_reloads_address_1 (mode, as,
				  XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
				  GET_CODE (XEXP (ad, op_index)),
				  &XEXP (ad, 1 - op_index), opnum,
				  type, 0, insn);

	  return 0;
	}
    }

  /* See if address becomes valid when an eliminable register
     in a sum is replaced.  */

  tem = ad;
  if (GET_CODE (ad) == PLUS)
    tem = subst_indexed_address (ad);
  if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
    {
      /* Ok, we win that way.  Replace any additional eliminable
	 registers.  */

      subst_reg_equivs_changed = 0;
      tem = subst_reg_equivs (tem, insn);

      /* Make sure that didn't make the address invalid again.  */

      if (! subst_reg_equivs_changed
	  || strict_memory_address_addr_space_p (mode, tem, as))
	{
	  *loc = tem;
	  return 0;
	}
    }

  /* If constants aren't valid addresses, reload the constant address
     into a register.  */
  if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
    {
      machine_mode address_mode = GET_MODE (ad);
      if (address_mode == VOIDmode)
	address_mode = targetm.addr_space.address_mode (as);

      /* If AD is an address in the constant pool, the MEM rtx may be shared.
	 Unshare it so we can safely alter it.  */
      if (memrefloc && GET_CODE (ad) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (ad))
	{
	  *memrefloc = copy_rtx (*memrefloc);
	  loc = &XEXP (*memrefloc, 0);
	  if (removed_and)
	    loc = &XEXP (*loc, 0);
	}

      find_reloads_address_part (ad, loc,
				 base_reg_class (mode, as, MEM, SCRATCH),
				 address_mode, opnum, type, ind_levels);
      return ! removed_and;
    }

  /* Fall back to the general recursive analysis of the address.  */
  return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
				 opnum, type, ind_levels, insn);
}
5284 | \f | |
/* Find all pseudo regs appearing in AD
   that are eliminable in favor of equivalent values
   and do not have hard regs; replace them by their equivalents.
   INSN, if nonzero, is the insn in which we do the reload.  We put USEs in
   front of it for pseudos that we have to replace with stack slots.

   AD is modified in place; the (possibly new) rtx is returned.  The
   global subst_reg_equivs_changed is set to 1 if any substitution was
   made (the caller is expected to clear it first).  */

static rtx
subst_reg_equivs (rtx ad, rtx_insn *insn)
{
  RTX_CODE code = GET_CODE (ad);
  int i;
  const char *fmt;

  switch (code)
    {
    /* Leaf rtxes that can contain no pseudo registers: nothing to do.  */
    case HIGH:
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return ad;

    case REG:
      {
	int regno = REGNO (ad);

	/* A pseudo with a constant equivalent: substitute the constant.  */
	if (reg_equiv_constant (regno) != 0)
	  {
	    subst_reg_equivs_changed = 1;
	    return reg_equiv_constant (regno);
	  }
	/* A pseudo living in a stack slot whose address depends on
	   eliminations that are not at their initial offset: substitute
	   a freshly computed memory location.  */
	if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
	  {
	    rtx mem = make_memloc (ad, regno);
	    if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
	      {
		subst_reg_equivs_changed = 1;
		/* We mark the USE with QImode so that we recognize it
		   as one that can be safely deleted at the end of
		   reload.  */
		PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
			  QImode);
		return mem;
	      }
	  }
      }
      return ad;

    case PLUS:
      /* Quickly dispose of a common case.  */
      if (XEXP (ad, 0) == frame_pointer_rtx
	  && CONST_INT_P (XEXP (ad, 1)))
	return ad;
      break;

    default:
      break;
    }

  /* Recurse into every rtx subexpression, replacing in place.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
  return ad;
}
5352 | \f | |
5353 | /* Compute the sum of X and Y, making canonicalizations assumed in an | |
5354 | address, namely: sum constant integers, surround the sum of two | |
5355 | constants with a CONST, put the constant as the second operand, and | |
5356 | group the constant on the outermost sum. | |
5357 | ||
5358 | This routine assumes both inputs are already in canonical form. */ | |
5359 | ||
5360 | rtx | |
ef4bddc2 | 5361 | form_sum (machine_mode mode, rtx x, rtx y) |
eab89b90 RK |
5362 | { |
5363 | rtx tem; | |
2c0623e8 | 5364 | |
d4ebfa65 BE |
5365 | gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode); |
5366 | gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode); | |
eab89b90 | 5367 | |
481683e1 | 5368 | if (CONST_INT_P (x)) |
0a81f074 | 5369 | return plus_constant (mode, y, INTVAL (x)); |
481683e1 | 5370 | else if (CONST_INT_P (y)) |
0a81f074 | 5371 | return plus_constant (mode, x, INTVAL (y)); |
eab89b90 RK |
5372 | else if (CONSTANT_P (x)) |
5373 | tem = x, x = y, y = tem; | |
5374 | ||
5375 | if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1))) | |
d4ebfa65 | 5376 | return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y)); |
eab89b90 RK |
5377 | |
5378 | /* Note that if the operands of Y are specified in the opposite | |
5379 | order in the recursive calls below, infinite recursion will occur. */ | |
d9771f62 | 5380 | if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1))) |
d4ebfa65 | 5381 | return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1)); |
eab89b90 RK |
5382 | |
5383 | /* If both constant, encapsulate sum. Otherwise, just form sum. A | |
5384 | constant will have been placed second. */ | |
5385 | if (CONSTANT_P (x) && CONSTANT_P (y)) | |
5386 | { | |
5387 | if (GET_CODE (x) == CONST) | |
5388 | x = XEXP (x, 0); | |
5389 | if (GET_CODE (y) == CONST) | |
5390 | y = XEXP (y, 0); | |
5391 | ||
38a448ca | 5392 | return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y)); |
eab89b90 RK |
5393 | } |
5394 | ||
38a448ca | 5395 | return gen_rtx_PLUS (mode, x, y); |
eab89b90 RK |
5396 | } |
5397 | \f | |
5398 | /* If ADDR is a sum containing a pseudo register that should be | |
5399 | replaced with a constant (from reg_equiv_constant), | |
5400 | return the result of doing so, and also apply the associative | |
5401 | law so that the result is more likely to be a valid address. | |
5402 | (But it is not guaranteed to be one.) | |
5403 | ||
5404 | Note that at most one register is replaced, even if more are | |
5405 | replaceable. Also, we try to put the result into a canonical form | |
5406 | so it is more likely to be a valid address. | |
5407 | ||
5408 | In all other cases, return ADDR. */ | |
5409 | ||
5410 | static rtx | |
0c20a65f | 5411 | subst_indexed_address (rtx addr) |
eab89b90 RK |
5412 | { |
5413 | rtx op0 = 0, op1 = 0, op2 = 0; | |
5414 | rtx tem; | |
5415 | int regno; | |
5416 | ||
5417 | if (GET_CODE (addr) == PLUS) | |
5418 | { | |
5419 | /* Try to find a register to replace. */ | |
5420 | op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0; | |
f8cfc6aa | 5421 | if (REG_P (op0) |
eab89b90 RK |
5422 | && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER |
5423 | && reg_renumber[regno] < 0 | |
f2034d06 JL |
5424 | && reg_equiv_constant (regno) != 0) |
5425 | op0 = reg_equiv_constant (regno); | |
f8cfc6aa | 5426 | else if (REG_P (op1) |
05d10675 BS |
5427 | && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER |
5428 | && reg_renumber[regno] < 0 | |
f2034d06 JL |
5429 | && reg_equiv_constant (regno) != 0) |
5430 | op1 = reg_equiv_constant (regno); | |
eab89b90 RK |
5431 | else if (GET_CODE (op0) == PLUS |
5432 | && (tem = subst_indexed_address (op0)) != op0) | |
5433 | op0 = tem; | |
5434 | else if (GET_CODE (op1) == PLUS | |
5435 | && (tem = subst_indexed_address (op1)) != op1) | |
5436 | op1 = tem; | |
5437 | else | |
5438 | return addr; | |
5439 | ||
5440 | /* Pick out up to three things to add. */ | |
5441 | if (GET_CODE (op1) == PLUS) | |
5442 | op2 = XEXP (op1, 1), op1 = XEXP (op1, 0); | |
5443 | else if (GET_CODE (op0) == PLUS) | |
5444 | op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); | |
5445 | ||
5446 | /* Compute the sum. */ | |
5447 | if (op2 != 0) | |
d4ebfa65 | 5448 | op1 = form_sum (GET_MODE (addr), op1, op2); |
eab89b90 | 5449 | if (op1 != 0) |
d4ebfa65 | 5450 | op0 = form_sum (GET_MODE (addr), op0, op1); |
eab89b90 RK |
5451 | |
5452 | return op0; | |
5453 | } | |
5454 | return addr; | |
5455 | } | |
5456 | \f | |
98c17df2 GS |
5457 | /* Update the REG_INC notes for an insn. It updates all REG_INC |
5458 | notes for the instruction which refer to REGNO the to refer | |
5459 | to the reload number. | |
5460 | ||
5461 | INSN is the insn for which any REG_INC notes need updating. | |
5462 | ||
5463 | REGNO is the register number which has been reloaded. | |
5464 | ||
5465 | RELOADNUM is the reload number. */ | |
5466 | ||
5467 | static void | |
5d86f5f9 | 5468 | update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED, |
0c20a65f | 5469 | int reloadnum ATTRIBUTE_UNUSED) |
98c17df2 GS |
5470 | { |
5471 | #ifdef AUTO_INC_DEC | |
5472 | rtx link; | |
5473 | ||
5474 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) | |
5475 | if (REG_NOTE_KIND (link) == REG_INC | |
fc555370 | 5476 | && (int) REGNO (XEXP (link, 0)) == regno) |
98c17df2 GS |
5477 | push_replacement (&XEXP (link, 0), reloadnum, VOIDmode); |
5478 | #endif | |
5479 | } | |
5480 | \f | |
858c3c8c ILT |
5481 | /* Record the pseudo registers we must reload into hard registers in a |
5482 | subexpression of a would-be memory address, X referring to a value | |
5483 | in mode MODE. (This function is not called if the address we find | |
5484 | is strictly valid.) | |
5485 | ||
eab89b90 | 5486 | CONTEXT = 1 means we are considering regs as index regs, |
c4963a0a BS |
5487 | = 0 means we are considering them as base regs. |
5488 | OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS, | |
5489 | or an autoinc code. | |
5490 | If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE | |
5491 | is the code of the index part of the address. Otherwise, pass SCRATCH | |
5492 | for this argument. | |
a8c9daeb | 5493 | OPNUM and TYPE specify the purpose of any reloads made. |
eab89b90 RK |
5494 | |
5495 | IND_LEVELS says how many levels of indirect addressing are | |
5496 | supported at this point in the address. | |
5497 | ||
55c22565 RK |
5498 | INSN, if nonzero, is the insn in which we do the reload. It is used |
5499 | to determine if we may generate output reloads. | |
5500 | ||
eab89b90 RK |
5501 | We return nonzero if X, as a whole, is reloaded or replaced. */ |
5502 | ||
5503 | /* Note that we take shortcuts assuming that no multi-reg machine mode | |
5504 | occurs as part of an address. | |
5505 | Also, this is not fully machine-customizable; it works for machines | |
8aeea6e6 | 5506 | such as VAXen and 68000's and 32000's, but other possible machines |
ff0d9879 HPN |
5507 | could have addressing modes that this does not handle right. |
5508 | If you add push_reload calls here, you need to make sure gen_reload | |
5509 | handles those cases gracefully. */ | |
eab89b90 RK |
5510 | |
5511 | static int | |
ef4bddc2 | 5512 | find_reloads_address_1 (machine_mode mode, addr_space_t as, |
86fc3d06 | 5513 | rtx x, int context, |
c4963a0a | 5514 | enum rtx_code outer_code, enum rtx_code index_code, |
0c20a65f | 5515 | rtx *loc, int opnum, enum reload_type type, |
5d86f5f9 | 5516 | int ind_levels, rtx_insn *insn) |
eab89b90 | 5517 | { |
86fc3d06 | 5518 | #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \ |
c4963a0a | 5519 | ((CONTEXT) == 0 \ |
86fc3d06 | 5520 | ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \ |
b8698a0f | 5521 | : REGNO_OK_FOR_INDEX_P (REGNO)) |
888d2cd6 DJ |
5522 | |
5523 | enum reg_class context_reg_class; | |
b3694847 | 5524 | RTX_CODE code = GET_CODE (x); |
9e6f9ad6 | 5525 | bool reloaded_inner_of_autoinc = false; |
eab89b90 | 5526 | |
c4963a0a | 5527 | if (context == 1) |
888d2cd6 DJ |
5528 | context_reg_class = INDEX_REG_CLASS; |
5529 | else | |
86fc3d06 | 5530 | context_reg_class = base_reg_class (mode, as, outer_code, index_code); |
888d2cd6 | 5531 | |
a2d353e5 | 5532 | switch (code) |
eab89b90 | 5533 | { |
a2d353e5 RK |
5534 | case PLUS: |
5535 | { | |
b3694847 SS |
5536 | rtx orig_op0 = XEXP (x, 0); |
5537 | rtx orig_op1 = XEXP (x, 1); | |
5538 | RTX_CODE code0 = GET_CODE (orig_op0); | |
5539 | RTX_CODE code1 = GET_CODE (orig_op1); | |
5540 | rtx op0 = orig_op0; | |
5541 | rtx op1 = orig_op1; | |
a2d353e5 RK |
5542 | |
5543 | if (GET_CODE (op0) == SUBREG) | |
5544 | { | |
5545 | op0 = SUBREG_REG (op0); | |
5546 | code0 = GET_CODE (op0); | |
922db4bb | 5547 | if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER) |
38a448ca | 5548 | op0 = gen_rtx_REG (word_mode, |
ddef6bc7 JJ |
5549 | (REGNO (op0) + |
5550 | subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)), | |
5551 | GET_MODE (SUBREG_REG (orig_op0)), | |
5552 | SUBREG_BYTE (orig_op0), | |
5553 | GET_MODE (orig_op0)))); | |
a2d353e5 | 5554 | } |
87935f60 | 5555 | |
a2d353e5 RK |
5556 | if (GET_CODE (op1) == SUBREG) |
5557 | { | |
5558 | op1 = SUBREG_REG (op1); | |
5559 | code1 = GET_CODE (op1); | |
922db4bb | 5560 | if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER) |
ddef6bc7 JJ |
5561 | /* ??? Why is this given op1's mode and above for |
5562 | ??? op0 SUBREGs we use word_mode? */ | |
38a448ca | 5563 | op1 = gen_rtx_REG (GET_MODE (op1), |
ddef6bc7 JJ |
5564 | (REGNO (op1) + |
5565 | subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)), | |
5566 | GET_MODE (SUBREG_REG (orig_op1)), | |
5567 | SUBREG_BYTE (orig_op1), | |
5568 | GET_MODE (orig_op1)))); | |
a2d353e5 | 5569 | } |
04c5580f | 5570 | /* Plus in the index register may be created only as a result of |
6fc0bb99 | 5571 | register rematerialization for expression like &localvar*4. Reload it. |
04c5580f JH |
5572 | It may be possible to combine the displacement on the outer level, |
5573 | but it is probably not worthwhile to do so. */ | |
888d2cd6 | 5574 | if (context == 1) |
04c5580f JH |
5575 | { |
5576 | find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0), | |
5577 | opnum, ADDR_TYPE (type), ind_levels, insn); | |
5578 | push_reload (*loc, NULL_RTX, loc, (rtx*) 0, | |
888d2cd6 | 5579 | context_reg_class, |
04c5580f JH |
5580 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
5581 | return 1; | |
5582 | } | |
a2d353e5 | 5583 | |
05d10675 | 5584 | if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE |
5f8997b9 | 5585 | || code0 == ZERO_EXTEND || code1 == MEM) |
a2d353e5 | 5586 | { |
86fc3d06 | 5587 | find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, |
c4963a0a BS |
5588 | &XEXP (x, 0), opnum, type, ind_levels, |
5589 | insn); | |
86fc3d06 | 5590 | find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0, |
c4963a0a BS |
5591 | &XEXP (x, 1), opnum, type, ind_levels, |
5592 | insn); | |
a2d353e5 RK |
5593 | } |
5594 | ||
5f8997b9 SC |
5595 | else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE |
5596 | || code1 == ZERO_EXTEND || code0 == MEM) | |
a2d353e5 | 5597 | { |
86fc3d06 | 5598 | find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1, |
c4963a0a BS |
5599 | &XEXP (x, 0), opnum, type, ind_levels, |
5600 | insn); | |
86fc3d06 | 5601 | find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, |
c4963a0a BS |
5602 | &XEXP (x, 1), opnum, type, ind_levels, |
5603 | insn); | |
a2d353e5 RK |
5604 | } |
5605 | ||
5606 | else if (code0 == CONST_INT || code0 == CONST | |
5607 | || code0 == SYMBOL_REF || code0 == LABEL_REF) | |
86fc3d06 | 5608 | find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0, |
c4963a0a BS |
5609 | &XEXP (x, 1), opnum, type, ind_levels, |
5610 | insn); | |
a2d353e5 RK |
5611 | |
5612 | else if (code1 == CONST_INT || code1 == CONST | |
5613 | || code1 == SYMBOL_REF || code1 == LABEL_REF) | |
86fc3d06 | 5614 | find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1, |
c4963a0a BS |
5615 | &XEXP (x, 0), opnum, type, ind_levels, |
5616 | insn); | |
a2d353e5 RK |
5617 | |
5618 | else if (code0 == REG && code1 == REG) | |
5619 | { | |
bd379f73 | 5620 | if (REGNO_OK_FOR_INDEX_P (REGNO (op1)) |
86fc3d06 | 5621 | && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG)) |
a2d353e5 | 5622 | return 0; |
bd379f73 | 5623 | else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)) |
86fc3d06 | 5624 | && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG)) |
a2d353e5 | 5625 | return 0; |
86fc3d06 UW |
5626 | else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG)) |
5627 | find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, | |
c4963a0a BS |
5628 | &XEXP (x, 1), opnum, type, ind_levels, |
5629 | insn); | |
5630 | else if (REGNO_OK_FOR_INDEX_P (REGNO (op1))) | |
86fc3d06 | 5631 | find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, |
c4963a0a BS |
5632 | &XEXP (x, 0), opnum, type, ind_levels, |
5633 | insn); | |
86fc3d06 UW |
5634 | else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG)) |
5635 | find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, | |
bd379f73 PH |
5636 | &XEXP (x, 0), opnum, type, ind_levels, |
5637 | insn); | |
c4963a0a | 5638 | else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))) |
86fc3d06 | 5639 | find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG, |
c4963a0a BS |
5640 | &XEXP (x, 1), opnum, type, ind_levels, |
5641 | insn); | |
a2d353e5 RK |
5642 | else |
5643 | { | |
86fc3d06 | 5644 | find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, |
c4963a0a BS |
5645 | &XEXP (x, 0), opnum, type, ind_levels, |
5646 | insn); | |
86fc3d06 | 5647 | find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, |
c4963a0a BS |
5648 | &XEXP (x, 1), opnum, type, ind_levels, |
5649 | insn); | |
a2d353e5 RK |
5650 | } |
5651 | } | |
5652 | ||
5653 | else if (code0 == REG) | |
5654 | { | |
86fc3d06 | 5655 | find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH, |
c4963a0a BS |
5656 | &XEXP (x, 0), opnum, type, ind_levels, |
5657 | insn); | |
86fc3d06 | 5658 | find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG, |
c4963a0a BS |
5659 | &XEXP (x, 1), opnum, type, ind_levels, |
5660 | insn); | |
a2d353e5 RK |
5661 | } |
5662 | ||
5663 | else if (code1 == REG) | |
5664 | { | |
86fc3d06 | 5665 | find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH, |
c4963a0a BS |
5666 | &XEXP (x, 1), opnum, type, ind_levels, |
5667 | insn); | |
86fc3d06 | 5668 | find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG, |
c4963a0a BS |
5669 | &XEXP (x, 0), opnum, type, ind_levels, |
5670 | insn); | |
a2d353e5 RK |
5671 | } |
5672 | } | |
5673 | ||
5674 | return 0; | |
5675 | ||
4b983fdc RH |
5676 | case POST_MODIFY: |
5677 | case PRE_MODIFY: | |
5678 | { | |
5679 | rtx op0 = XEXP (x, 0); | |
5680 | rtx op1 = XEXP (x, 1); | |
c4963a0a | 5681 | enum rtx_code index_code; |
41374e13 NS |
5682 | int regno; |
5683 | int reloadnum; | |
4b983fdc RH |
5684 | |
5685 | if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS) | |
5686 | return 0; | |
5687 | ||
5688 | /* Currently, we only support {PRE,POST}_MODIFY constructs | |
5689 | where a base register is {inc,dec}remented by the contents | |
5690 | of another register or by a constant value. Thus, these | |
5691 | operands must match. */ | |
41374e13 | 5692 | gcc_assert (op0 == XEXP (op1, 0)); |
4b983fdc RH |
5693 | |
5694 | /* Require index register (or constant). Let's just handle the | |
5695 | register case in the meantime... If the target allows | |
5696 | auto-modify by a constant then we could try replacing a pseudo | |
4b7b277a RS |
5697 | register with its equivalent constant where applicable. |
5698 | ||
b098aaf2 UW |
5699 | We also handle the case where the register was eliminated |
5700 | resulting in a PLUS subexpression. | |
5701 | ||
4b7b277a RS |
5702 | If we later decide to reload the whole PRE_MODIFY or |
5703 | POST_MODIFY, inc_for_reload might clobber the reload register | |
5704 | before reading the index. The index register might therefore | |
5705 | need to live longer than a TYPE reload normally would, so be | |
5706 | conservative and class it as RELOAD_OTHER. */ | |
b098aaf2 UW |
5707 | if ((REG_P (XEXP (op1, 1)) |
5708 | && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1)))) | |
5709 | || GET_CODE (XEXP (op1, 1)) == PLUS) | |
86fc3d06 | 5710 | find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH, |
b098aaf2 UW |
5711 | &XEXP (op1, 1), opnum, RELOAD_OTHER, |
5712 | ind_levels, insn); | |
4b983fdc | 5713 | |
41374e13 | 5714 | gcc_assert (REG_P (XEXP (op1, 0))); |
4b983fdc | 5715 | |
41374e13 | 5716 | regno = REGNO (XEXP (op1, 0)); |
c4963a0a | 5717 | index_code = GET_CODE (XEXP (op1, 1)); |
41374e13 NS |
5718 | |
5719 | /* A register that is incremented cannot be constant! */ | |
5720 | gcc_assert (regno < FIRST_PSEUDO_REGISTER | |
f2034d06 | 5721 | || reg_equiv_constant (regno) == 0); |
4b983fdc | 5722 | |
41374e13 NS |
5723 | /* Handle a register that is equivalent to a memory location |
5724 | which cannot be addressed directly. */ | |
f2034d06 JL |
5725 | if (reg_equiv_memory_loc (regno) != 0 |
5726 | && (reg_equiv_address (regno) != 0 | |
41374e13 NS |
5727 | || num_not_at_initial_offset)) |
5728 | { | |
5729 | rtx tem = make_memloc (XEXP (x, 0), regno); | |
4b983fdc | 5730 | |
f2034d06 JL |
5731 | if (reg_equiv_address (regno) |
5732 | || ! rtx_equal_p (tem, reg_equiv_mem (regno))) | |
4b983fdc | 5733 | { |
3f1e3e70 AO |
5734 | rtx orig = tem; |
5735 | ||
41374e13 NS |
5736 | /* First reload the memory location's address. |
5737 | We can't use ADDR_TYPE (type) here, because we need to | |
5738 | write back the value after reading it, hence we actually | |
5739 | need two registers. */ | |
5740 | find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), | |
5741 | &XEXP (tem, 0), opnum, | |
5742 | RELOAD_OTHER, | |
5743 | ind_levels, insn); | |
5744 | ||
0f4b25a3 | 5745 | if (!rtx_equal_p (tem, orig)) |
3f1e3e70 AO |
5746 | push_reg_equiv_alt_mem (regno, tem); |
5747 | ||
41374e13 | 5748 | /* Then reload the memory location into a base |
c4963a0a | 5749 | register. */ |
41374e13 | 5750 | reloadnum = push_reload (tem, tem, &XEXP (x, 0), |
c4963a0a | 5751 | &XEXP (op1, 0), |
86fc3d06 UW |
5752 | base_reg_class (mode, as, |
5753 | code, index_code), | |
c4963a0a BS |
5754 | GET_MODE (x), GET_MODE (x), 0, |
5755 | 0, opnum, RELOAD_OTHER); | |
9532e31f | 5756 | |
98c17df2 GS |
5757 | update_auto_inc_notes (this_insn, regno, reloadnum); |
5758 | return 0; | |
5759 | } | |
4b983fdc | 5760 | } |
41374e13 NS |
5761 | |
5762 | if (reg_renumber[regno] >= 0) | |
5763 | regno = reg_renumber[regno]; | |
5764 | ||
5765 | /* We require a base register here... */ | |
86fc3d06 | 5766 | if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code)) |
41374e13 NS |
5767 | { |
5768 | reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0), | |
c4963a0a | 5769 | &XEXP (op1, 0), &XEXP (x, 0), |
86fc3d06 UW |
5770 | base_reg_class (mode, as, |
5771 | code, index_code), | |
c4963a0a BS |
5772 | GET_MODE (x), GET_MODE (x), 0, 0, |
5773 | opnum, RELOAD_OTHER); | |
41374e13 NS |
5774 | |
5775 | update_auto_inc_notes (this_insn, regno, reloadnum); | |
5776 | return 0; | |
5777 | } | |
4b983fdc RH |
5778 | } |
5779 | return 0; | |
5780 | ||
a2d353e5 RK |
5781 | case POST_INC: |
5782 | case POST_DEC: | |
5783 | case PRE_INC: | |
5784 | case PRE_DEC: | |
f8cfc6aa | 5785 | if (REG_P (XEXP (x, 0))) |
eab89b90 | 5786 | { |
b3694847 | 5787 | int regno = REGNO (XEXP (x, 0)); |
eab89b90 RK |
5788 | int value = 0; |
5789 | rtx x_orig = x; | |
5790 | ||
5791 | /* A register that is incremented cannot be constant! */ | |
41374e13 | 5792 | gcc_assert (regno < FIRST_PSEUDO_REGISTER |
f2034d06 | 5793 | || reg_equiv_constant (regno) == 0); |
eab89b90 RK |
5794 | |
5795 | /* Handle a register that is equivalent to a memory location | |
5796 | which cannot be addressed directly. */ | |
f2034d06 JL |
5797 | if (reg_equiv_memory_loc (regno) != 0 |
5798 | && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) | |
eab89b90 RK |
5799 | { |
5800 | rtx tem = make_memloc (XEXP (x, 0), regno); | |
f2034d06 JL |
5801 | if (reg_equiv_address (regno) |
5802 | || ! rtx_equal_p (tem, reg_equiv_mem (regno))) | |
cb2afeb3 | 5803 | { |
3f1e3e70 AO |
5804 | rtx orig = tem; |
5805 | ||
cb2afeb3 R |
5806 | /* First reload the memory location's address. |
5807 | We can't use ADDR_TYPE (type) here, because we need to | |
5808 | write back the value after reading it, hence we actually | |
5809 | need two registers. */ | |
5810 | find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0), | |
5811 | &XEXP (tem, 0), opnum, type, | |
5812 | ind_levels, insn); | |
9e6f9ad6 | 5813 | reloaded_inner_of_autoinc = true; |
0f4b25a3 | 5814 | if (!rtx_equal_p (tem, orig)) |
3f1e3e70 | 5815 | push_reg_equiv_alt_mem (regno, tem); |
cb2afeb3 R |
5816 | /* Put this inside a new increment-expression. */ |
5817 | x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem); | |
5818 | /* Proceed to reload that, as if it contained a register. */ | |
5819 | } | |
eab89b90 RK |
5820 | } |
5821 | ||
f89935ed HPN |
5822 | /* If we have a hard register that is ok in this incdec context, |
5823 | don't make a reload. If the register isn't nice enough for | |
5824 | autoincdec, we can reload it. But, if an autoincrement of a | |
5825 | register that we here verified as playing nice, still outside | |
eab89b90 RK |
5826 | isn't "valid", it must be that no autoincrement is "valid". |
5827 | If that is true and something made an autoincrement anyway, | |
5828 | this must be a special context where one is allowed. | |
5829 | (For example, a "push" instruction.) | |
5830 | We can't improve this address, so leave it alone. */ | |
5831 | ||
5832 | /* Otherwise, reload the autoincrement into a suitable hard reg | |
5833 | and record how much to increment by. */ | |
5834 | ||
5835 | if (reg_renumber[regno] >= 0) | |
5836 | regno = reg_renumber[regno]; | |
888d2cd6 | 5837 | if (regno >= FIRST_PSEUDO_REGISTER |
86fc3d06 | 5838 | || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code, |
c4963a0a | 5839 | index_code)) |
eab89b90 | 5840 | { |
55c22565 RK |
5841 | int reloadnum; |
5842 | ||
5843 | /* If we can output the register afterwards, do so, this | |
5844 | saves the extra update. | |
5845 | We can do so if we have an INSN - i.e. no JUMP_INSN nor | |
5846 | CALL_INSN - and it does not set CC0. | |
5847 | But don't do this if we cannot directly address the | |
5848 | memory location, since this will make it harder to | |
956d6950 | 5849 | reuse address reloads, and increases register pressure. |
55c22565 | 5850 | Also don't do this if we can probably update x directly. */ |
3c0cb5de | 5851 | rtx equiv = (MEM_P (XEXP (x, 0)) |
cb2afeb3 | 5852 | ? XEXP (x, 0) |
f2034d06 | 5853 | : reg_equiv_mem (regno)); |
2ef6ce06 | 5854 | enum insn_code icode = optab_handler (add_optab, GET_MODE (x)); |
4b4bf941 | 5855 | if (insn && NONJUMP_INSN_P (insn) && equiv |
cb2afeb3 | 5856 | && memory_operand (equiv, GET_MODE (equiv)) |
55c22565 RK |
5857 | #ifdef HAVE_cc0 |
5858 | && ! sets_cc0_p (PATTERN (insn)) | |
5859 | #endif | |
5860 | && ! (icode != CODE_FOR_nothing | |
2ef6ce06 | 5861 | && insn_operand_matches (icode, 0, equiv) |
9e6f9ad6 BS |
5862 | && insn_operand_matches (icode, 1, equiv)) |
5863 | /* Using RELOAD_OTHER means we emit this and the reload we | |
5864 | made earlier in the wrong order. */ | |
5865 | && !reloaded_inner_of_autoinc) | |
55c22565 | 5866 | { |
9e8f528c AO |
5867 | /* We use the original pseudo for loc, so that |
5868 | emit_reload_insns() knows which pseudo this | |
5869 | reload refers to and updates the pseudo rtx, not | |
5870 | its equivalent memory location, as well as the | |
5871 | corresponding entry in reg_last_reload_reg. */ | |
5872 | loc = &XEXP (x_orig, 0); | |
55c22565 RK |
5873 | x = XEXP (x, 0); |
5874 | reloadnum | |
5875 | = push_reload (x, x, loc, loc, | |
888d2cd6 | 5876 | context_reg_class, |
05d10675 BS |
5877 | GET_MODE (x), GET_MODE (x), 0, 0, |
5878 | opnum, RELOAD_OTHER); | |
55c22565 RK |
5879 | } |
5880 | else | |
5881 | { | |
5882 | reloadnum | |
7fb446a3 | 5883 | = push_reload (x, x, loc, (rtx*) 0, |
888d2cd6 | 5884 | context_reg_class, |
e9a25f70 | 5885 | GET_MODE (x), GET_MODE (x), 0, 0, |
55c22565 | 5886 | opnum, type); |
eceef4c9 | 5887 | rld[reloadnum].inc |
55c22565 | 5888 | = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0)); |
05d10675 | 5889 | |
55c22565 RK |
5890 | value = 1; |
5891 | } | |
eab89b90 | 5892 | |
98c17df2 GS |
5893 | update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)), |
5894 | reloadnum); | |
eab89b90 RK |
5895 | } |
5896 | return value; | |
5897 | } | |
a2d353e5 RK |
5898 | return 0; |
5899 | ||
19fe122f HPN |
5900 | case TRUNCATE: |
5901 | case SIGN_EXTEND: | |
5902 | case ZERO_EXTEND: | |
5903 | /* Look for parts to reload in the inner expression and reload them | |
5904 | too, in addition to this operation. Reloading all inner parts in | |
5905 | addition to this one shouldn't be necessary, but at this point, | |
5906 | we don't know if we can possibly omit any part that *can* be | |
5907 | reloaded. Targets that are better off reloading just either part | |
5908 | (or perhaps even a different part of an outer expression), should | |
5909 | define LEGITIMIZE_RELOAD_ADDRESS. */ | |
86fc3d06 | 5910 | find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0), |
c4963a0a | 5911 | context, code, SCRATCH, &XEXP (x, 0), opnum, |
19fe122f HPN |
5912 | type, ind_levels, insn); |
5913 | push_reload (x, NULL_RTX, loc, (rtx*) 0, | |
5914 | context_reg_class, | |
5915 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); | |
5916 | return 1; | |
5917 | ||
a2d353e5 RK |
5918 | case MEM: |
5919 | /* This is probably the result of a substitution, by eliminate_regs, of | |
5920 | an equivalent address for a pseudo that was not allocated to a hard | |
5921 | register. Verify that the specified address is valid and reload it | |
5922 | into a register. | |
eab89b90 | 5923 | |
a2d353e5 RK |
5924 | Since we know we are going to reload this item, don't decrement for |
5925 | the indirection level. | |
eab89b90 RK |
5926 | |
5927 | Note that this is actually conservative: it would be slightly more | |
5928 | efficient to use the value of SPILL_INDIRECT_LEVELS from | |
5929 | reload1.c here. */ | |
5930 | ||
5931 | find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0), | |
55c22565 | 5932 | opnum, ADDR_TYPE (type), ind_levels, insn); |
f4f4d0f8 | 5933 | push_reload (*loc, NULL_RTX, loc, (rtx*) 0, |
888d2cd6 | 5934 | context_reg_class, |
a8c9daeb | 5935 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
eab89b90 | 5936 | return 1; |
eab89b90 | 5937 | |
a2d353e5 RK |
5938 | case REG: |
5939 | { | |
b3694847 | 5940 | int regno = REGNO (x); |
a2d353e5 | 5941 | |
f2034d06 | 5942 | if (reg_equiv_constant (regno) != 0) |
a2d353e5 | 5943 | { |
f2034d06 | 5944 | find_reloads_address_part (reg_equiv_constant (regno), loc, |
888d2cd6 | 5945 | context_reg_class, |
a2d353e5 RK |
5946 | GET_MODE (x), opnum, type, ind_levels); |
5947 | return 1; | |
5948 | } | |
eab89b90 RK |
5949 | |
5950 | #if 0 /* This might screw code in reload1.c to delete prior output-reload | |
5951 | that feeds this insn. */ | |
f2034d06 | 5952 | if (reg_equiv_mem (regno) != 0) |
a2d353e5 | 5953 | { |
f2034d06 | 5954 | push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0, |
888d2cd6 | 5955 | context_reg_class, |
a2d353e5 RK |
5956 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
5957 | return 1; | |
5958 | } | |
eab89b90 | 5959 | #endif |
eab89b90 | 5960 | |
f2034d06 JL |
5961 | if (reg_equiv_memory_loc (regno) |
5962 | && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)) | |
a2d353e5 | 5963 | { |
cb2afeb3 | 5964 | rtx tem = make_memloc (x, regno); |
f2034d06 JL |
5965 | if (reg_equiv_address (regno) != 0 |
5966 | || ! rtx_equal_p (tem, reg_equiv_mem (regno))) | |
cb2afeb3 R |
5967 | { |
5968 | x = tem; | |
5969 | find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), | |
5970 | &XEXP (x, 0), opnum, ADDR_TYPE (type), | |
5971 | ind_levels, insn); | |
0f4b25a3 | 5972 | if (!rtx_equal_p (x, tem)) |
3f1e3e70 | 5973 | push_reg_equiv_alt_mem (regno, x); |
cb2afeb3 | 5974 | } |
a2d353e5 | 5975 | } |
eab89b90 | 5976 | |
a2d353e5 RK |
5977 | if (reg_renumber[regno] >= 0) |
5978 | regno = reg_renumber[regno]; | |
5979 | ||
888d2cd6 | 5980 | if (regno >= FIRST_PSEUDO_REGISTER |
86fc3d06 | 5981 | || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code, |
c4963a0a | 5982 | index_code)) |
a2d353e5 | 5983 | { |
f4f4d0f8 | 5984 | push_reload (x, NULL_RTX, loc, (rtx*) 0, |
888d2cd6 | 5985 | context_reg_class, |
a2d353e5 RK |
5986 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
5987 | return 1; | |
5988 | } | |
5989 | ||
5990 | /* If a register appearing in an address is the subject of a CLOBBER | |
5991 | in this insn, reload it into some other register to be safe. | |
5992 | The CLOBBER is supposed to make the register unavailable | |
5993 | from before this insn to after it. */ | |
9532e31f | 5994 | if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0)) |
a2d353e5 | 5995 | { |
f4f4d0f8 | 5996 | push_reload (x, NULL_RTX, loc, (rtx*) 0, |
888d2cd6 | 5997 | context_reg_class, |
a2d353e5 RK |
5998 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
5999 | return 1; | |
6000 | } | |
6001 | } | |
6002 | return 0; | |
6003 | ||
6004 | case SUBREG: | |
f8cfc6aa | 6005 | if (REG_P (SUBREG_REG (x))) |
eab89b90 | 6006 | { |
922db4bb RK |
6007 | /* If this is a SUBREG of a hard register and the resulting register |
6008 | is of the wrong class, reload the whole SUBREG. This avoids | |
6009 | needless copies if SUBREG_REG is multi-word. */ | |
6010 | if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) | |
6011 | { | |
4977bab6 | 6012 | int regno ATTRIBUTE_UNUSED = subreg_regno (x); |
a2d353e5 | 6013 | |
86fc3d06 | 6014 | if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code, |
c4963a0a | 6015 | index_code)) |
922db4bb | 6016 | { |
f4f4d0f8 | 6017 | push_reload (x, NULL_RTX, loc, (rtx*) 0, |
888d2cd6 | 6018 | context_reg_class, |
922db4bb RK |
6019 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); |
6020 | return 1; | |
6021 | } | |
6022 | } | |
abc95ed3 | 6023 | /* If this is a SUBREG of a pseudo-register, and the pseudo-register |
922db4bb RK |
6024 | is larger than the class size, then reload the whole SUBREG. */ |
6025 | else | |
a2d353e5 | 6026 | { |
55d796da | 6027 | enum reg_class rclass = context_reg_class; |
a8c44c52 AS |
6028 | if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))] |
6029 | > reg_class_size[(int) rclass]) | |
922db4bb | 6030 | { |
80de67b8 UW |
6031 | /* If the inner register will be replaced by a memory |
6032 | reference, we can do this only if we can replace the | |
6033 | whole subreg by a (narrower) memory reference. If | |
6034 | this is not possible, fall through and reload just | |
6035 | the inner register (including address reloads). */ | |
6036 | if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0) | |
6037 | { | |
6038 | rtx tem = find_reloads_subreg_address (x, opnum, | |
6039 | ADDR_TYPE (type), | |
6040 | ind_levels, insn, | |
6041 | NULL); | |
6042 | if (tem) | |
6043 | { | |
6044 | push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass, | |
6045 | GET_MODE (tem), VOIDmode, 0, 0, | |
6046 | opnum, type); | |
6047 | return 1; | |
6048 | } | |
6049 | } | |
6050 | else | |
6051 | { | |
6052 | push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass, | |
6053 | GET_MODE (x), VOIDmode, 0, 0, opnum, type); | |
6054 | return 1; | |
6055 | } | |
922db4bb | 6056 | } |
a2d353e5 | 6057 | } |
eab89b90 | 6058 | } |
a2d353e5 | 6059 | break; |
05d10675 | 6060 | |
e9a25f70 JL |
6061 | default: |
6062 | break; | |
eab89b90 RK |
6063 | } |
6064 | ||
a2d353e5 | 6065 | { |
b3694847 SS |
6066 | const char *fmt = GET_RTX_FORMAT (code); |
6067 | int i; | |
a2d353e5 RK |
6068 | |
6069 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
6070 | { | |
6071 | if (fmt[i] == 'e') | |
c4963a0a BS |
6072 | /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once |
6073 | we get here. */ | |
86fc3d06 UW |
6074 | find_reloads_address_1 (mode, as, XEXP (x, i), context, |
6075 | code, SCRATCH, &XEXP (x, i), | |
6076 | opnum, type, ind_levels, insn); | |
a2d353e5 RK |
6077 | } |
6078 | } | |
6079 | ||
888d2cd6 | 6080 | #undef REG_OK_FOR_CONTEXT |
eab89b90 RK |
6081 | return 0; |
6082 | } | |
6083 | \f | |
/* X, which is found at *LOC, is a part of an address that needs to be
   reloaded into a register of class RCLASS.  If X is a constant, or if
   X is a PLUS that contains a constant, check that the constant is a
   legitimate operand and that we are supposed to be able to load
   it into the register.

   If not, force the constant into memory and reload the MEM instead.

   MODE is the mode to use, in case X is an integer constant.

   OPNUM and TYPE describe the purpose of any reloads made.

   IND_LEVELS says how many levels of indirect addressing this machine
   supports.  */

static void
find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
			   machine_mode mode, int opnum,
			   enum reload_type type, int ind_levels)
{
  /* A bare constant that is either not legitimate in MODE or may not be
     loaded into RCLASS: spill it to the constant pool and reload the
     address of the resulting MEM.  */
  if (CONSTANT_P (x)
      && (!targetm.legitimate_constant_p (mode, x)
	  || targetm.preferred_reload_class (x, rclass) == NO_REGS))
    {
      x = force_const_mem (mode, x);
      find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
			    opnum, type, ind_levels, 0);
    }

  /* Likewise for an illegitimate constant term of a PLUS: only the
     constant operand is forced into memory, and the PLUS is rebuilt
     around the new pool reference.  */
  else if (GET_CODE (x) == PLUS
	   && CONSTANT_P (XEXP (x, 1))
	   && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
	       || targetm.preferred_reload_class (XEXP (x, 1), rclass)
		  == NO_REGS))
    {
      rtx tem;

      tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
      x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
      find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
			    opnum, type, ind_levels, 0);
    }

  /* Finally reload X itself (possibly replaced or rebuilt above) into a
     register of class RCLASS.  */
  push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
	       mode, VOIDmode, 0, 0, opnum, type);
}
6130 | \f | |
/* X, a subreg of a pseudo, is a part of an address that needs to be
   reloaded, and the pseudo is equivalent to a memory location.

   Attempt to replace the whole subreg by a (possibly narrower or wider)
   memory reference.  If this is possible, return this new memory
   reference, and push all required address reloads.  Otherwise,
   return NULL.

   OPNUM and TYPE identify the purpose of the reload.

   IND_LEVELS says how many levels of indirect addressing are
   supported at this point in the address.

   INSN, if nonzero, is the insn in which we do the reload.  It is used
   to determine where to put USEs for pseudos that we have to replace with
   stack slots.

   If ADDRESS_RELOADED is nonnull, *ADDRESS_RELOADED is set to nonzero
   iff the address of the result was reloaded.  */

static rtx
find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
			     int ind_levels, rtx_insn *insn,
			     int *address_reloaded)
{
  machine_mode outer_mode = GET_MODE (x);
  machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  int regno = REGNO (SUBREG_REG (x));
  int reloaded = 0;
  rtx tem, orig;
  int offset;

  /* Caller guarantees the inner pseudo has a memory equivalence.  */
  gcc_assert (reg_equiv_memory_loc (regno) != 0);

  /* We cannot replace the subreg with a modified memory reference if:

     - we have a paradoxical subreg that implicitly acts as a zero or
       sign extension operation due to LOAD_EXTEND_OP;

     - we have a subreg that is implicitly supposed to act on the full
       register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);

     - the address of the equivalent memory location is mode-dependent;  or

     - we have a paradoxical subreg and the resulting memory is not
       sufficiently aligned to allow access in the wider mode.

    In addition, we choose not to perform the replacement for *any*
    paradoxical subreg, even if it were possible in principle.  This
    is to avoid generating wider memory references than necessary.

    This corresponds to how previous versions of reload used to handle
    paradoxical subregs where no address reload was required.  */

  if (paradoxical_subreg_p (x))
    return NULL;

#ifdef WORD_REGISTER_OPERATIONS
  /* A narrowing subreg that stays within the same number of words acts
     on the full register; do not narrow the memory reference.  */
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode)
      && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
	  == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
    return NULL;
#endif

  /* Since we don't attempt to handle paradoxical subregs, we can just
     call into simplify_subreg, which will handle all remaining checks
     for us.  */
  orig = make_memloc (SUBREG_REG (x), regno);
  offset = SUBREG_BYTE (x);
  tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
  if (!tem || !MEM_P (tem))
    return NULL;

  /* Now push all required address reloads, if any.  */
  reloaded = find_reloads_address (GET_MODE (tem), &tem,
				   XEXP (tem, 0), &XEXP (tem, 0),
				   opnum, type, ind_levels, insn);
  /* ??? Do we need to handle nonzero offsets somehow?  */
  if (!offset && !rtx_equal_p (tem, orig))
    push_reg_equiv_alt_mem (regno, tem);

  /* For some processors an address may be valid in the original mode but
     not in a smaller mode.  For example, ARM accepts a scaled index register
     in SImode but not in HImode.  Note that this is only a problem if the
     address in reg_equiv_mem is already invalid in the new mode; other
     cases would be fixed by find_reloads_address as usual.

     ??? We attempt to handle such cases here by doing an additional reload
     of the full address after the usual processing by find_reloads_address.
     Note that this may not work in the general case, but it seems to cover
     the cases where this situation currently occurs.  A more general fix
     might be to reload the *value* instead of the address, but this would
     not be expected by the callers of this routine as-is.

     If find_reloads_address already completely replaced the address, there
     is nothing further to do.  */
  if (reloaded == 0
      && reg_equiv_mem (regno) != 0
      && !strict_memory_address_addr_space_p
	   (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
	    MEM_ADDR_SPACE (reg_equiv_mem (regno))))
    {
      push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
		   base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
				   MEM, SCRATCH),
		   GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
      reloaded = 1;
    }

  /* If this is not a toplevel operand, find_reloads doesn't see this
     substitution.  We have to emit a USE of the pseudo so that
     delete_output_reload can see it.  */
  if (replace_reloads && recog_data.operand[opnum] != x)
    /* We mark the USE with QImode so that we recognize it as one that
       can be safely deleted at the end of reload.  */
    PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
	      QImode);

  if (address_reloaded)
    *address_reloaded = reloaded;

  return tem;
}
6251 | \f | |
/* Substitute into the current INSN the registers into which we have reloaded
   the things that need reloading.  The array `replacements'
   contains the locations of all pointers that must be changed
   and says what to replace them with.  */

void
subst_reloads (rtx_insn *insn)
{
  int i;

  for (i = 0; i < n_replacements; i++)
    {
      struct replacement *r = &replacements[i];
      rtx reloadreg = rld[r->what].reg_rtx;
      if (reloadreg)
	{
#ifdef DEBUG_RELOAD
	  /* This checking takes a very long time on some platforms
	     causing the gcc.c-torture/compile/limits-fnargs.c test
	     to time out during testing.  See PR 31850.

	     Internal consistency test.  Check that we don't modify
	     anything in the equivalence arrays.  Whenever something from
	     those arrays needs to be reloaded, it must be unshared before
	     being substituted into; the equivalence must not be modified.
	     Otherwise, if the equivalence is used after that, it will
	     have been modified, and the thing substituted (probably a
	     register) is likely overwritten and not a usable equivalence.  */
	  int check_regno;

	  for (check_regno = 0; check_regno < max_regno; check_regno++)
	    {
#define CHECK_MODF(ARRAY) \
	      gcc_assert (!(*reg_equivs)[check_regno].ARRAY		\
			  || !loc_mentioned_in_p (r->where,		\
						  (*reg_equivs)[check_regno].ARRAY))

	      CHECK_MODF (constant);
	      CHECK_MODF (memory_loc);
	      CHECK_MODF (address);
	      CHECK_MODF (mem);
#undef CHECK_MODF
	    }
#endif /* DEBUG_RELOAD */

	  /* If we're replacing a LABEL_REF with a register, there must
	     already be an indication (to e.g. flow) which label this
	     register refers to.  */
	  gcc_assert (GET_CODE (*r->where) != LABEL_REF
		      || !JUMP_P (insn)
		      || find_reg_note (insn,
					REG_LABEL_OPERAND,
					XEXP (*r->where, 0))
		      || label_is_jump_target_p (XEXP (*r->where, 0), insn));

	  /* Encapsulate RELOADREG so its machine mode matches what
	     used to be there.  Note that gen_lowpart_common will
	     do the wrong thing if RELOADREG is multi-word.  RELOADREG
	     will always be a REG here.  */
	  if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
	    reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);

	  *r->where = reloadreg;
	}
      /* If reload got no reg and isn't optional, something's wrong.  */
      else
	gcc_assert (rld[r->what].optional);
    }
}
6323 | \f | |
561c9153 RH |
6324 | /* Make a copy of any replacements being done into X and move those |
6325 | copies to locations in Y, a copy of X. */ | |
eab89b90 RK |
6326 | |
6327 | void | |
0c20a65f | 6328 | copy_replacements (rtx x, rtx y) |
eab89b90 | 6329 | { |
561c9153 RH |
6330 | copy_replacements_1 (&x, &y, n_replacements); |
6331 | } | |
6332 | ||
6333 | static void | |
0c20a65f | 6334 | copy_replacements_1 (rtx *px, rtx *py, int orig_replacements) |
561c9153 RH |
6335 | { |
6336 | int i, j; | |
6337 | rtx x, y; | |
6338 | struct replacement *r; | |
6339 | enum rtx_code code; | |
6340 | const char *fmt; | |
6341 | ||
6342 | for (j = 0; j < orig_replacements; j++) | |
f3678bfb UW |
6343 | if (replacements[j].where == px) |
6344 | { | |
6345 | r = &replacements[n_replacements++]; | |
6346 | r->where = py; | |
6347 | r->what = replacements[j].what; | |
6348 | r->mode = replacements[j].mode; | |
6349 | } | |
561c9153 RH |
6350 | |
6351 | x = *px; | |
6352 | y = *py; | |
6353 | code = GET_CODE (x); | |
6354 | fmt = GET_RTX_FORMAT (code); | |
6355 | ||
6356 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
6357 | { | |
6358 | if (fmt[i] == 'e') | |
6359 | copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements); | |
6360 | else if (fmt[i] == 'E') | |
6361 | for (j = XVECLEN (x, i); --j >= 0; ) | |
6362 | copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j), | |
6363 | orig_replacements); | |
6364 | } | |
eab89b90 | 6365 | } |
a9a2595b | 6366 | |
3eae4643 | 6367 | /* Change any replacements being done to *X to be done to *Y. */ |
a9a2595b JR |
6368 | |
6369 | void | |
0c20a65f | 6370 | move_replacements (rtx *x, rtx *y) |
a9a2595b JR |
6371 | { |
6372 | int i; | |
6373 | ||
6374 | for (i = 0; i < n_replacements; i++) | |
f3678bfb UW |
6375 | if (replacements[i].where == x) |
6376 | replacements[i].where = y; | |
a9a2595b | 6377 | } |
eab89b90 | 6378 | \f |
af929c62 RK |
6379 | /* If LOC was scheduled to be replaced by something, return the replacement. |
6380 | Otherwise, return *LOC. */ | |
6381 | ||
6382 | rtx | |
0c20a65f | 6383 | find_replacement (rtx *loc) |
af929c62 RK |
6384 | { |
6385 | struct replacement *r; | |
6386 | ||
6387 | for (r = &replacements[0]; r < &replacements[n_replacements]; r++) | |
6388 | { | |
eceef4c9 | 6389 | rtx reloadreg = rld[r->what].reg_rtx; |
af929c62 RK |
6390 | |
6391 | if (reloadreg && r->where == loc) | |
6392 | { | |
6393 | if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode) | |
f3678bfb | 6394 | reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode); |
af929c62 RK |
6395 | |
6396 | return reloadreg; | |
6397 | } | |
f3678bfb UW |
6398 | else if (reloadreg && GET_CODE (*loc) == SUBREG |
6399 | && r->where == &SUBREG_REG (*loc)) | |
af929c62 | 6400 | { |
f3678bfb UW |
6401 | if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode) |
6402 | reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode); | |
6403 | ||
6404 | return simplify_gen_subreg (GET_MODE (*loc), reloadreg, | |
6405 | GET_MODE (SUBREG_REG (*loc)), | |
6406 | SUBREG_BYTE (*loc)); | |
af929c62 RK |
6407 | } |
6408 | } | |
6409 | ||
956d6950 JL |
6410 | /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for |
6411 | what's inside and make a new rtl if so. */ | |
6412 | if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS | |
6413 | || GET_CODE (*loc) == MULT) | |
6414 | { | |
6415 | rtx x = find_replacement (&XEXP (*loc, 0)); | |
6416 | rtx y = find_replacement (&XEXP (*loc, 1)); | |
6417 | ||
6418 | if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1)) | |
38a448ca | 6419 | return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y); |
956d6950 JL |
6420 | } |
6421 | ||
af929c62 RK |
6422 | return *loc; |
6423 | } | |
6424 | \f | |
eab89b90 RK |
6425 | /* Return nonzero if register in range [REGNO, ENDREGNO) |
6426 | appears either explicitly or implicitly in X | |
4644aad4 | 6427 | other than being stored into (except for earlyclobber operands). |
eab89b90 RK |
6428 | |
6429 | References contained within the substructure at LOC do not count. | |
6430 | LOC may be zero, meaning don't ignore anything. | |
6431 | ||
6432 | This is similar to refers_to_regno_p in rtlanal.c except that we | |
6433 | look at equivalences for pseudos that didn't get hard registers. */ | |
6434 | ||
10015a27 | 6435 | static int |
0c20a65f AJ |
6436 | refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno, |
6437 | rtx x, rtx *loc) | |
eab89b90 | 6438 | { |
770ae6cc RK |
6439 | int i; |
6440 | unsigned int r; | |
6441 | RTX_CODE code; | |
6442 | const char *fmt; | |
eab89b90 RK |
6443 | |
6444 | if (x == 0) | |
6445 | return 0; | |
6446 | ||
6447 | repeat: | |
6448 | code = GET_CODE (x); | |
6449 | ||
6450 | switch (code) | |
6451 | { | |
6452 | case REG: | |
770ae6cc | 6453 | r = REGNO (x); |
eab89b90 | 6454 | |
4803a34a RK |
6455 | /* If this is a pseudo, a hard register must not have been allocated. |
6456 | X must therefore either be a constant or be in memory. */ | |
770ae6cc | 6457 | if (r >= FIRST_PSEUDO_REGISTER) |
4803a34a | 6458 | { |
f2034d06 | 6459 | if (reg_equiv_memory_loc (r)) |
4803a34a | 6460 | return refers_to_regno_for_reload_p (regno, endregno, |
f2034d06 | 6461 | reg_equiv_memory_loc (r), |
f4f4d0f8 | 6462 | (rtx*) 0); |
4803a34a | 6463 | |
f2034d06 | 6464 | gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r)); |
41374e13 | 6465 | return 0; |
4803a34a | 6466 | } |
eab89b90 | 6467 | |
770ae6cc RK |
6468 | return (endregno > r |
6469 | && regno < r + (r < FIRST_PSEUDO_REGISTER | |
66fd46b6 | 6470 | ? hard_regno_nregs[r][GET_MODE (x)] |
eab89b90 RK |
6471 | : 1)); |
6472 | ||
6473 | case SUBREG: | |
6474 | /* If this is a SUBREG of a hard reg, we can see exactly which | |
6475 | registers are being modified. Otherwise, handle normally. */ | |
f8cfc6aa | 6476 | if (REG_P (SUBREG_REG (x)) |
eab89b90 RK |
6477 | && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER) |
6478 | { | |
ddef6bc7 | 6479 | unsigned int inner_regno = subreg_regno (x); |
770ae6cc | 6480 | unsigned int inner_endregno |
403c659c | 6481 | = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER |
f1f4e530 | 6482 | ? subreg_nregs (x) : 1); |
eab89b90 RK |
6483 | |
6484 | return endregno > inner_regno && regno < inner_endregno; | |
6485 | } | |
6486 | break; | |
6487 | ||
6488 | case CLOBBER: | |
6489 | case SET: | |
6490 | if (&SET_DEST (x) != loc | |
6491 | /* Note setting a SUBREG counts as referring to the REG it is in for | |
6492 | a pseudo but not for hard registers since we can | |
6493 | treat each word individually. */ | |
6494 | && ((GET_CODE (SET_DEST (x)) == SUBREG | |
6495 | && loc != &SUBREG_REG (SET_DEST (x)) | |
f8cfc6aa | 6496 | && REG_P (SUBREG_REG (SET_DEST (x))) |
eab89b90 RK |
6497 | && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER |
6498 | && refers_to_regno_for_reload_p (regno, endregno, | |
6499 | SUBREG_REG (SET_DEST (x)), | |
6500 | loc)) | |
abc95ed3 | 6501 | /* If the output is an earlyclobber operand, this is |
4644aad4 | 6502 | a conflict. */ |
f8cfc6aa | 6503 | || ((!REG_P (SET_DEST (x)) |
4644aad4 | 6504 | || earlyclobber_operand_p (SET_DEST (x))) |
eab89b90 RK |
6505 | && refers_to_regno_for_reload_p (regno, endregno, |
6506 | SET_DEST (x), loc)))) | |
6507 | return 1; | |
6508 | ||
6509 | if (code == CLOBBER || loc == &SET_SRC (x)) | |
6510 | return 0; | |
6511 | x = SET_SRC (x); | |
6512 | goto repeat; | |
05d10675 | 6513 | |
e9a25f70 JL |
6514 | default: |
6515 | break; | |
eab89b90 RK |
6516 | } |
6517 | ||
6518 | /* X does not match, so try its subexpressions. */ | |
6519 | ||
6520 | fmt = GET_RTX_FORMAT (code); | |
6521 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
6522 | { | |
6523 | if (fmt[i] == 'e' && loc != &XEXP (x, i)) | |
6524 | { | |
6525 | if (i == 0) | |
6526 | { | |
6527 | x = XEXP (x, 0); | |
6528 | goto repeat; | |
6529 | } | |
6530 | else | |
6531 | if (refers_to_regno_for_reload_p (regno, endregno, | |
6532 | XEXP (x, i), loc)) | |
6533 | return 1; | |
6534 | } | |
6535 | else if (fmt[i] == 'E') | |
6536 | { | |
b3694847 | 6537 | int j; |
4381f7c2 | 6538 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
eab89b90 RK |
6539 | if (loc != &XVECEXP (x, i, j) |
6540 | && refers_to_regno_for_reload_p (regno, endregno, | |
6541 | XVECEXP (x, i, j), loc)) | |
6542 | return 1; | |
6543 | } | |
6544 | } | |
6545 | return 0; | |
6546 | } | |
bfa30b22 RK |
6547 | |
6548 | /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG, | |
6549 | we check if any register number in X conflicts with the relevant register | |
6550 | numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN | |
6551 | contains a MEM (we don't bother checking for memory addresses that can't | |
05d10675 | 6552 | conflict because we expect this to be a rare case. |
bfa30b22 | 6553 | |
e0120d6e | 6554 | This function is similar to reg_overlap_mentioned_p in rtlanal.c except |
bfa30b22 RK |
6555 | that we look at equivalences for pseudos that didn't get hard registers. */ |
6556 | ||
6557 | int | |
0c20a65f | 6558 | reg_overlap_mentioned_for_reload_p (rtx x, rtx in) |
bfa30b22 RK |
6559 | { |
6560 | int regno, endregno; | |
6561 | ||
b98b49ac | 6562 | /* Overly conservative. */ |
38979c65 | 6563 | if (GET_CODE (x) == STRICT_LOW_PART |
ec8e098d | 6564 | || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC) |
b98b49ac JL |
6565 | x = XEXP (x, 0); |
6566 | ||
6567 | /* If either argument is a constant, then modifying X can not affect IN. */ | |
6568 | if (CONSTANT_P (x) || CONSTANT_P (in)) | |
6569 | return 0; | |
481683e1 | 6570 | else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x))) |
838f78d6 | 6571 | return refers_to_mem_for_reload_p (in); |
b98b49ac | 6572 | else if (GET_CODE (x) == SUBREG) |
bfa30b22 RK |
6573 | { |
6574 | regno = REGNO (SUBREG_REG (x)); | |
6575 | if (regno < FIRST_PSEUDO_REGISTER) | |
ddef6bc7 JJ |
6576 | regno += subreg_regno_offset (REGNO (SUBREG_REG (x)), |
6577 | GET_MODE (SUBREG_REG (x)), | |
6578 | SUBREG_BYTE (x), | |
6579 | GET_MODE (x)); | |
f1f4e530 JM |
6580 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
6581 | ? subreg_nregs (x) : 1); | |
6582 | ||
6583 | return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0); | |
bfa30b22 | 6584 | } |
f8cfc6aa | 6585 | else if (REG_P (x)) |
bfa30b22 RK |
6586 | { |
6587 | regno = REGNO (x); | |
4803a34a RK |
6588 | |
6589 | /* If this is a pseudo, it must not have been assigned a hard register. | |
6590 | Therefore, it must either be in memory or be a constant. */ | |
6591 | ||
6592 | if (regno >= FIRST_PSEUDO_REGISTER) | |
6593 | { | |
f2034d06 | 6594 | if (reg_equiv_memory_loc (regno)) |
4803a34a | 6595 | return refers_to_mem_for_reload_p (in); |
f2034d06 | 6596 | gcc_assert (reg_equiv_constant (regno)); |
41374e13 | 6597 | return 0; |
4803a34a | 6598 | } |
f1f4e530 | 6599 | |
09e18274 | 6600 | endregno = END_HARD_REGNO (x); |
f1f4e530 JM |
6601 | |
6602 | return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0); | |
bfa30b22 | 6603 | } |
3c0cb5de | 6604 | else if (MEM_P (x)) |
4803a34a | 6605 | return refers_to_mem_for_reload_p (in); |
bfa30b22 RK |
6606 | else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC |
6607 | || GET_CODE (x) == CC0) | |
6608 | return reg_mentioned_p (x, in); | |
b8698a0f | 6609 | else |
69f38ab9 | 6610 | { |
41374e13 NS |
6611 | gcc_assert (GET_CODE (x) == PLUS); |
6612 | ||
69f38ab9 R |
6613 | /* We actually want to know if X is mentioned somewhere inside IN. |
6614 | We must not say that (plus (sp) (const_int 124)) is in | |
6615 | (plus (sp) (const_int 64)), since that can lead to incorrect reload | |
6616 | allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS | |
6617 | into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */ | |
3c0cb5de | 6618 | while (MEM_P (in)) |
69f38ab9 | 6619 | in = XEXP (in, 0); |
f8cfc6aa | 6620 | if (REG_P (in)) |
69f38ab9 R |
6621 | return 0; |
6622 | else if (GET_CODE (in) == PLUS) | |
10050f74 KK |
6623 | return (rtx_equal_p (x, in) |
6624 | || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0)) | |
69f38ab9 R |
6625 | || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1))); |
6626 | else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in) | |
6627 | || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in)); | |
6628 | } | |
bfa30b22 | 6629 | |
f1f4e530 | 6630 | gcc_unreachable (); |
bfa30b22 | 6631 | } |
4803a34a RK |
6632 | |
6633 | /* Return nonzero if anything in X contains a MEM. Look also for pseudo | |
6634 | registers. */ | |
6635 | ||
10015a27 | 6636 | static int |
0c20a65f | 6637 | refers_to_mem_for_reload_p (rtx x) |
4803a34a | 6638 | { |
6f7d635c | 6639 | const char *fmt; |
4803a34a RK |
6640 | int i; |
6641 | ||
3c0cb5de | 6642 | if (MEM_P (x)) |
4803a34a RK |
6643 | return 1; |
6644 | ||
f8cfc6aa | 6645 | if (REG_P (x)) |
4803a34a | 6646 | return (REGNO (x) >= FIRST_PSEUDO_REGISTER |
f2034d06 | 6647 | && reg_equiv_memory_loc (REGNO (x))); |
05d10675 | 6648 | |
4803a34a RK |
6649 | fmt = GET_RTX_FORMAT (GET_CODE (x)); |
6650 | for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--) | |
6651 | if (fmt[i] == 'e' | |
3c0cb5de | 6652 | && (MEM_P (XEXP (x, i)) |
4803a34a RK |
6653 | || refers_to_mem_for_reload_p (XEXP (x, i)))) |
6654 | return 1; | |
05d10675 | 6655 | |
4803a34a RK |
6656 | return 0; |
6657 | } | |
eab89b90 | 6658 | \f |
eab89b90 RK |
6659 | /* Check the insns before INSN to see if there is a suitable register |
6660 | containing the same value as GOAL. | |
55d796da | 6661 | If OTHER is -1, look for a register in class RCLASS. |
eab89b90 RK |
6662 | Otherwise, just see if register number OTHER shares GOAL's value. |
6663 | ||
6664 | Return an rtx for the register found, or zero if none is found. | |
6665 | ||
6666 | If RELOAD_REG_P is (short *)1, | |
6667 | we reject any hard reg that appears in reload_reg_rtx | |
6668 | because such a hard reg is also needed coming into this insn. | |
6669 | ||
6670 | If RELOAD_REG_P is any other nonzero value, | |
6671 | it is a vector indexed by hard reg number | |
6672 | and we reject any hard reg whose element in the vector is nonnegative | |
6673 | as well as any that appears in reload_reg_rtx. | |
6674 | ||
6675 | If GOAL is zero, then GOALREG is a register number; we look | |
6676 | for an equivalent for that register. | |
6677 | ||
6678 | MODE is the machine mode of the value we want an equivalence for. | |
6679 | If GOAL is nonzero and not VOIDmode, then it must have mode MODE. | |
6680 | ||
6681 | This function is used by jump.c as well as in the reload pass. | |
6682 | ||
6683 | If GOAL is the sum of the stack pointer and a constant, we treat it | |
6684 | as if it were a constant except that sp is required to be unchanging. */ | |
6685 | ||
6686 | rtx | |
5d86f5f9 | 6687 | find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other, |
ef4bddc2 | 6688 | short *reload_reg_p, int goalreg, machine_mode mode) |
eab89b90 | 6689 | { |
5d86f5f9 | 6690 | rtx_insn *p = insn; |
e8a54173 DM |
6691 | rtx goaltry, valtry, value; |
6692 | rtx_insn *where; | |
b3694847 SS |
6693 | rtx pat; |
6694 | int regno = -1; | |
eab89b90 RK |
6695 | int valueno; |
6696 | int goal_mem = 0; | |
6697 | int goal_const = 0; | |
6698 | int goal_mem_addr_varies = 0; | |
6699 | int need_stable_sp = 0; | |
6700 | int nregs; | |
6701 | int valuenregs; | |
0bcf8261 | 6702 | int num = 0; |
eab89b90 RK |
6703 | |
6704 | if (goal == 0) | |
6705 | regno = goalreg; | |
f8cfc6aa | 6706 | else if (REG_P (goal)) |
eab89b90 | 6707 | regno = REGNO (goal); |
3c0cb5de | 6708 | else if (MEM_P (goal)) |
eab89b90 RK |
6709 | { |
6710 | enum rtx_code code = GET_CODE (XEXP (goal, 0)); | |
6711 | if (MEM_VOLATILE_P (goal)) | |
6712 | return 0; | |
3d8bf70f | 6713 | if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal))) |
eab89b90 RK |
6714 | return 0; |
6715 | /* An address with side effects must be reexecuted. */ | |
6716 | switch (code) | |
6717 | { | |
6718 | case POST_INC: | |
6719 | case PRE_INC: | |
6720 | case POST_DEC: | |
6721 | case PRE_DEC: | |
4b983fdc RH |
6722 | case POST_MODIFY: |
6723 | case PRE_MODIFY: | |
eab89b90 | 6724 | return 0; |
e9a25f70 JL |
6725 | default: |
6726 | break; | |
eab89b90 RK |
6727 | } |
6728 | goal_mem = 1; | |
6729 | } | |
6730 | else if (CONSTANT_P (goal)) | |
6731 | goal_const = 1; | |
6732 | else if (GET_CODE (goal) == PLUS | |
6733 | && XEXP (goal, 0) == stack_pointer_rtx | |
6734 | && CONSTANT_P (XEXP (goal, 1))) | |
6735 | goal_const = need_stable_sp = 1; | |
812f2051 R |
6736 | else if (GET_CODE (goal) == PLUS |
6737 | && XEXP (goal, 0) == frame_pointer_rtx | |
6738 | && CONSTANT_P (XEXP (goal, 1))) | |
6739 | goal_const = 1; | |
eab89b90 RK |
6740 | else |
6741 | return 0; | |
6742 | ||
0bcf8261 | 6743 | num = 0; |
eab89b90 RK |
6744 | /* Scan insns back from INSN, looking for one that copies |
6745 | a value into or out of GOAL. | |
6746 | Stop and give up if we reach a label. */ | |
6747 | ||
6748 | while (1) | |
6749 | { | |
6750 | p = PREV_INSN (p); | |
b5b8b0ac AO |
6751 | if (p && DEBUG_INSN_P (p)) |
6752 | continue; | |
0bcf8261 | 6753 | num++; |
4b4bf941 | 6754 | if (p == 0 || LABEL_P (p) |
0bcf8261 | 6755 | || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS)) |
eab89b90 | 6756 | return 0; |
770ae6cc | 6757 | |
9a2091cd HPN |
6758 | /* Don't reuse register contents from before a setjmp-type |
6759 | function call; on the second return (from the longjmp) it | |
6760 | might have been clobbered by a later reuse. It doesn't | |
6761 | seem worthwhile to actually go and see if it is actually | |
6762 | reused even if that information would be readily available; | |
6763 | just don't reuse it across the setjmp call. */ | |
6764 | if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX)) | |
6765 | return 0; | |
6766 | ||
4b4bf941 | 6767 | if (NONJUMP_INSN_P (p) |
0f41302f | 6768 | /* If we don't want spill regs ... */ |
a8c9daeb RK |
6769 | && (! (reload_reg_p != 0 |
6770 | && reload_reg_p != (short *) (HOST_WIDE_INT) 1) | |
770ae6cc RK |
6771 | /* ... then ignore insns introduced by reload; they aren't |
6772 | useful and can cause results in reload_as_needed to be | |
6773 | different from what they were when calculating the need for | |
6774 | spills. If we notice an input-reload insn here, we will | |
6775 | reject it below, but it might hide a usable equivalent. | |
0e61db61 | 6776 | That makes bad code. It may even fail: perhaps no reg was |
770ae6cc RK |
6777 | spilled for this insn because it was assumed we would find |
6778 | that equivalent. */ | |
eab89b90 RK |
6779 | || INSN_UID (p) < reload_first_uid)) |
6780 | { | |
e8094962 | 6781 | rtx tem; |
eab89b90 | 6782 | pat = single_set (p); |
770ae6cc | 6783 | |
eab89b90 RK |
6784 | /* First check for something that sets some reg equal to GOAL. */ |
6785 | if (pat != 0 | |
6786 | && ((regno >= 0 | |
6787 | && true_regnum (SET_SRC (pat)) == regno | |
6788 | && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0) | |
6789 | || | |
6790 | (regno >= 0 | |
6791 | && true_regnum (SET_DEST (pat)) == regno | |
6792 | && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0) | |
6793 | || | |
6794 | (goal_const && rtx_equal_p (SET_SRC (pat), goal) | |
a5546290 R |
6795 | /* When looking for stack pointer + const, |
6796 | make sure we don't use a stack adjust. */ | |
6797 | && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal) | |
eab89b90 RK |
6798 | && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0) |
6799 | || (goal_mem | |
6800 | && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0 | |
6801 | && rtx_renumbered_equal_p (goal, SET_SRC (pat))) | |
6802 | || (goal_mem | |
6803 | && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0 | |
e8094962 RK |
6804 | && rtx_renumbered_equal_p (goal, SET_DEST (pat))) |
6805 | /* If we are looking for a constant, | |
6806 | and something equivalent to that constant was copied | |
6807 | into a reg, we can use that reg. */ | |
efc9bd41 RK |
6808 | || (goal_const && REG_NOTES (p) != 0 |
6809 | && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX)) | |
6810 | && ((rtx_equal_p (XEXP (tem, 0), goal) | |
6811 | && (valueno | |
6812 | = true_regnum (valtry = SET_DEST (pat))) >= 0) | |
f8cfc6aa | 6813 | || (REG_P (SET_DEST (pat)) |
48175537 | 6814 | && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0)) |
3d8bf70f | 6815 | && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0))) |
481683e1 | 6816 | && CONST_INT_P (goal) |
efc9bd41 RK |
6817 | && 0 != (goaltry |
6818 | = operand_subword (XEXP (tem, 0), 0, 0, | |
f55b1d97 | 6819 | VOIDmode)) |
efc9bd41 RK |
6820 | && rtx_equal_p (goal, goaltry) |
6821 | && (valtry | |
6822 | = operand_subword (SET_DEST (pat), 0, 0, | |
6823 | VOIDmode)) | |
6824 | && (valueno = true_regnum (valtry)) >= 0))) | |
fb3821f7 CH |
6825 | || (goal_const && (tem = find_reg_note (p, REG_EQUIV, |
6826 | NULL_RTX)) | |
f8cfc6aa | 6827 | && REG_P (SET_DEST (pat)) |
48175537 | 6828 | && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0)) |
3d8bf70f | 6829 | && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0))) |
481683e1 | 6830 | && CONST_INT_P (goal) |
f55b1d97 RK |
6831 | && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0, |
6832 | VOIDmode)) | |
6833 | && rtx_equal_p (goal, goaltry) | |
e8094962 RK |
6834 | && (valtry |
6835 | = operand_subword (SET_DEST (pat), 1, 0, VOIDmode)) | |
95d3562b | 6836 | && (valueno = true_regnum (valtry)) >= 0))) |
0192d704 R |
6837 | { |
6838 | if (other >= 0) | |
6839 | { | |
6840 | if (valueno != other) | |
6841 | continue; | |
6842 | } | |
6843 | else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER) | |
6844 | continue; | |
55d796da | 6845 | else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass], |
09e18274 RS |
6846 | mode, valueno)) |
6847 | continue; | |
0192d704 R |
6848 | value = valtry; |
6849 | where = p; | |
6850 | break; | |
6851 | } | |
eab89b90 RK |
6852 | } |
6853 | } | |
6854 | ||
6855 | /* We found a previous insn copying GOAL into a suitable other reg VALUE | |
6856 | (or copying VALUE into GOAL, if GOAL is also a register). | |
6857 | Now verify that VALUE is really valid. */ | |
6858 | ||
6859 | /* VALUENO is the register number of VALUE; a hard register. */ | |
6860 | ||
6861 | /* Don't try to re-use something that is killed in this insn. We want | |
6862 | to be able to trust REG_UNUSED notes. */ | |
efc9bd41 | 6863 | if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value)) |
eab89b90 RK |
6864 | return 0; |
6865 | ||
6866 | /* If we propose to get the value from the stack pointer or if GOAL is | |
6867 | a MEM based on the stack pointer, we need a stable SP. */ | |
d5a1d1c7 | 6868 | if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM |
bfa30b22 RK |
6869 | || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx, |
6870 | goal))) | |
eab89b90 RK |
6871 | need_stable_sp = 1; |
6872 | ||
6873 | /* Reject VALUE if the copy-insn moved the wrong sort of datum. */ | |
6874 | if (GET_MODE (value) != mode) | |
6875 | return 0; | |
6876 | ||
6877 | /* Reject VALUE if it was loaded from GOAL | |
6878 | and is also a register that appears in the address of GOAL. */ | |
6879 | ||
bd5f6d44 | 6880 | if (goal_mem && value == SET_DEST (single_set (where)) |
09e18274 | 6881 | && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno), |
f4f4d0f8 | 6882 | goal, (rtx*) 0)) |
eab89b90 RK |
6883 | return 0; |
6884 | ||
6885 | /* Reject registers that overlap GOAL. */ | |
6886 | ||
66fd46b6 JH |
6887 | if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER) |
6888 | nregs = hard_regno_nregs[regno][mode]; | |
6889 | else | |
6890 | nregs = 1; | |
6891 | valuenregs = hard_regno_nregs[valueno][mode]; | |
16ab191f | 6892 | |
3c241c19 GK |
6893 | if (!goal_mem && !goal_const |
6894 | && regno + nregs > valueno && regno < valueno + valuenregs) | |
6895 | return 0; | |
6896 | ||
eab89b90 RK |
6897 | /* Reject VALUE if it is one of the regs reserved for reloads. |
6898 | Reload1 knows how to reuse them anyway, and it would get | |
6899 | confused if we allocated one without its knowledge. | |
6900 | (Now that insns introduced by reload are ignored above, | |
6901 | this case shouldn't happen, but I'm not positive.) */ | |
6902 | ||
16ab191f DC |
6903 | if (reload_reg_p != 0 && reload_reg_p != (short *) (HOST_WIDE_INT) 1) |
6904 | { | |
6905 | int i; | |
6906 | for (i = 0; i < valuenregs; ++i) | |
6907 | if (reload_reg_p[valueno + i] >= 0) | |
6908 | return 0; | |
6909 | } | |
eab89b90 RK |
6910 | |
6911 | /* Reject VALUE if it is a register being used for an input reload | |
6912 | even if it is not one of those reserved. */ | |
6913 | ||
6914 | if (reload_reg_p != 0) | |
6915 | { | |
6916 | int i; | |
6917 | for (i = 0; i < n_reloads; i++) | |
eceef4c9 | 6918 | if (rld[i].reg_rtx != 0 && rld[i].in) |
eab89b90 | 6919 | { |
eceef4c9 | 6920 | int regno1 = REGNO (rld[i].reg_rtx); |
66fd46b6 JH |
6921 | int nregs1 = hard_regno_nregs[regno1] |
6922 | [GET_MODE (rld[i].reg_rtx)]; | |
eab89b90 RK |
6923 | if (regno1 < valueno + valuenregs |
6924 | && regno1 + nregs1 > valueno) | |
6925 | return 0; | |
6926 | } | |
6927 | } | |
6928 | ||
6929 | if (goal_mem) | |
54b5ffe9 RS |
6930 | /* We must treat frame pointer as varying here, |
6931 | since it can vary--in a nonlocal goto as generated by expand_goto. */ | |
6932 | goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0)); | |
eab89b90 RK |
6933 | |
6934 | /* Now verify that the values of GOAL and VALUE remain unaltered | |
6935 | until INSN is reached. */ | |
6936 | ||
6937 | p = insn; | |
6938 | while (1) | |
6939 | { | |
6940 | p = PREV_INSN (p); | |
6941 | if (p == where) | |
6942 | return value; | |
6943 | ||
6944 | /* Don't trust the conversion past a function call | |
6945 | if either of the two is in a call-clobbered register, or memory. */ | |
4b4bf941 | 6946 | if (CALL_P (p)) |
aad2919f DC |
6947 | { |
6948 | int i; | |
4381f7c2 | 6949 | |
aad2919f DC |
6950 | if (goal_mem || need_stable_sp) |
6951 | return 0; | |
4381f7c2 | 6952 | |
aad2919f DC |
6953 | if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER) |
6954 | for (i = 0; i < nregs; ++i) | |
7e42db17 DJ |
6955 | if (call_used_regs[regno + i] |
6956 | || HARD_REGNO_CALL_PART_CLOBBERED (regno + i, mode)) | |
aad2919f DC |
6957 | return 0; |
6958 | ||
6959 | if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER) | |
6960 | for (i = 0; i < valuenregs; ++i) | |
7e42db17 DJ |
6961 | if (call_used_regs[valueno + i] |
6962 | || HARD_REGNO_CALL_PART_CLOBBERED (valueno + i, mode)) | |
aad2919f | 6963 | return 0; |
570a98eb | 6964 | } |
41fe17ab | 6965 | |
2c3c49de | 6966 | if (INSN_P (p)) |
eab89b90 | 6967 | { |
8ec82f87 RH |
6968 | pat = PATTERN (p); |
6969 | ||
05d10675 BS |
6970 | /* Watch out for unspec_volatile, and volatile asms. */ |
6971 | if (volatile_insn_p (pat)) | |
8ec82f87 RH |
6972 | return 0; |
6973 | ||
eab89b90 RK |
6974 | /* If this insn P stores in either GOAL or VALUE, return 0. |
6975 | If GOAL is a memory ref and this insn writes memory, return 0. | |
6976 | If GOAL is a memory ref and its address is not constant, | |
6977 | and this insn P changes a register used in GOAL, return 0. */ | |
6978 | ||
0c99ec5c RH |
6979 | if (GET_CODE (pat) == COND_EXEC) |
6980 | pat = COND_EXEC_CODE (pat); | |
eab89b90 RK |
6981 | if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER) |
6982 | { | |
b3694847 | 6983 | rtx dest = SET_DEST (pat); |
eab89b90 RK |
6984 | while (GET_CODE (dest) == SUBREG |
6985 | || GET_CODE (dest) == ZERO_EXTRACT | |
eab89b90 RK |
6986 | || GET_CODE (dest) == STRICT_LOW_PART) |
6987 | dest = XEXP (dest, 0); | |
f8cfc6aa | 6988 | if (REG_P (dest)) |
eab89b90 | 6989 | { |
b3694847 | 6990 | int xregno = REGNO (dest); |
eab89b90 RK |
6991 | int xnregs; |
6992 | if (REGNO (dest) < FIRST_PSEUDO_REGISTER) | |
66fd46b6 | 6993 | xnregs = hard_regno_nregs[xregno][GET_MODE (dest)]; |
eab89b90 RK |
6994 | else |
6995 | xnregs = 1; | |
6996 | if (xregno < regno + nregs && xregno + xnregs > regno) | |
6997 | return 0; | |
6998 | if (xregno < valueno + valuenregs | |
6999 | && xregno + xnregs > valueno) | |
7000 | return 0; | |
7001 | if (goal_mem_addr_varies | |
bfa30b22 | 7002 | && reg_overlap_mentioned_for_reload_p (dest, goal)) |
eab89b90 | 7003 | return 0; |
1b4d8b2b R |
7004 | if (xregno == STACK_POINTER_REGNUM && need_stable_sp) |
7005 | return 0; | |
eab89b90 | 7006 | } |
3c0cb5de | 7007 | else if (goal_mem && MEM_P (dest) |
eab89b90 RK |
7008 | && ! push_operand (dest, GET_MODE (dest))) |
7009 | return 0; | |
3c0cb5de | 7010 | else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER |
f2034d06 | 7011 | && reg_equiv_memory_loc (regno) != 0) |
9fac9680 | 7012 | return 0; |
eab89b90 RK |
7013 | else if (need_stable_sp && push_operand (dest, GET_MODE (dest))) |
7014 | return 0; | |
7015 | } | |
7016 | else if (GET_CODE (pat) == PARALLEL) | |
7017 | { | |
b3694847 | 7018 | int i; |
eab89b90 RK |
7019 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) |
7020 | { | |
b3694847 | 7021 | rtx v1 = XVECEXP (pat, 0, i); |
0c99ec5c RH |
7022 | if (GET_CODE (v1) == COND_EXEC) |
7023 | v1 = COND_EXEC_CODE (v1); | |
eab89b90 RK |
7024 | if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER) |
7025 | { | |
b3694847 | 7026 | rtx dest = SET_DEST (v1); |
eab89b90 RK |
7027 | while (GET_CODE (dest) == SUBREG |
7028 | || GET_CODE (dest) == ZERO_EXTRACT | |
eab89b90 RK |
7029 | || GET_CODE (dest) == STRICT_LOW_PART) |
7030 | dest = XEXP (dest, 0); | |
f8cfc6aa | 7031 | if (REG_P (dest)) |
eab89b90 | 7032 | { |
b3694847 | 7033 | int xregno = REGNO (dest); |
eab89b90 RK |
7034 | int xnregs; |
7035 | if (REGNO (dest) < FIRST_PSEUDO_REGISTER) | |
66fd46b6 | 7036 | xnregs = hard_regno_nregs[xregno][GET_MODE (dest)]; |
eab89b90 RK |
7037 | else |
7038 | xnregs = 1; | |
7039 | if (xregno < regno + nregs | |
7040 | && xregno + xnregs > regno) | |
7041 | return 0; | |
7042 | if (xregno < valueno + valuenregs | |
7043 | && xregno + xnregs > valueno) | |
7044 | return 0; | |
7045 | if (goal_mem_addr_varies | |
bfa30b22 RK |
7046 | && reg_overlap_mentioned_for_reload_p (dest, |
7047 | goal)) | |
eab89b90 | 7048 | return 0; |
930176e7 R |
7049 | if (xregno == STACK_POINTER_REGNUM && need_stable_sp) |
7050 | return 0; | |
eab89b90 | 7051 | } |
3c0cb5de | 7052 | else if (goal_mem && MEM_P (dest) |
eab89b90 RK |
7053 | && ! push_operand (dest, GET_MODE (dest))) |
7054 | return 0; | |
3c0cb5de | 7055 | else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER |
f2034d06 | 7056 | && reg_equiv_memory_loc (regno) != 0) |
e9a25f70 | 7057 | return 0; |
369c7ab6 JW |
7058 | else if (need_stable_sp |
7059 | && push_operand (dest, GET_MODE (dest))) | |
7060 | return 0; | |
7061 | } | |
7062 | } | |
7063 | } | |
7064 | ||
4b4bf941 | 7065 | if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p)) |
369c7ab6 JW |
7066 | { |
7067 | rtx link; | |
7068 | ||
7069 | for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0; | |
7070 | link = XEXP (link, 1)) | |
7071 | { | |
7072 | pat = XEXP (link, 0); | |
7073 | if (GET_CODE (pat) == CLOBBER) | |
7074 | { | |
b3694847 | 7075 | rtx dest = SET_DEST (pat); |
f8cd4126 | 7076 | |
f8cfc6aa | 7077 | if (REG_P (dest)) |
369c7ab6 | 7078 | { |
b3694847 | 7079 | int xregno = REGNO (dest); |
f8cd4126 | 7080 | int xnregs |
66fd46b6 | 7081 | = hard_regno_nregs[xregno][GET_MODE (dest)]; |
f8cd4126 | 7082 | |
369c7ab6 JW |
7083 | if (xregno < regno + nregs |
7084 | && xregno + xnregs > regno) | |
7085 | return 0; | |
f8cd4126 | 7086 | else if (xregno < valueno + valuenregs |
4381f7c2 | 7087 | && xregno + xnregs > valueno) |
369c7ab6 | 7088 | return 0; |
f8cd4126 RK |
7089 | else if (goal_mem_addr_varies |
7090 | && reg_overlap_mentioned_for_reload_p (dest, | |
369c7ab6 JW |
7091 | goal)) |
7092 | return 0; | |
7093 | } | |
f8cd4126 | 7094 | |
3c0cb5de | 7095 | else if (goal_mem && MEM_P (dest) |
369c7ab6 JW |
7096 | && ! push_operand (dest, GET_MODE (dest))) |
7097 | return 0; | |
eab89b90 RK |
7098 | else if (need_stable_sp |
7099 | && push_operand (dest, GET_MODE (dest))) | |
7100 | return 0; | |
7101 | } | |
7102 | } | |
7103 | } | |
7104 | ||
7105 | #ifdef AUTO_INC_DEC | |
7106 | /* If this insn auto-increments or auto-decrements | |
7107 | either regno or valueno, return 0 now. | |
7108 | If GOAL is a memory ref and its address is not constant, | |
7109 | and this insn P increments a register used in GOAL, return 0. */ | |
7110 | { | |
b3694847 | 7111 | rtx link; |
eab89b90 RK |
7112 | |
7113 | for (link = REG_NOTES (p); link; link = XEXP (link, 1)) | |
7114 | if (REG_NOTE_KIND (link) == REG_INC | |
f8cfc6aa | 7115 | && REG_P (XEXP (link, 0))) |
eab89b90 | 7116 | { |
b3694847 | 7117 | int incno = REGNO (XEXP (link, 0)); |
eab89b90 RK |
7118 | if (incno < regno + nregs && incno >= regno) |
7119 | return 0; | |
7120 | if (incno < valueno + valuenregs && incno >= valueno) | |
7121 | return 0; | |
7122 | if (goal_mem_addr_varies | |
bfa30b22 RK |
7123 | && reg_overlap_mentioned_for_reload_p (XEXP (link, 0), |
7124 | goal)) | |
eab89b90 RK |
7125 | return 0; |
7126 | } | |
7127 | } | |
7128 | #endif | |
7129 | } | |
7130 | } | |
7131 | } | |
7132 | \f | |
7133 | /* Find a place where INCED appears in an increment or decrement operator | |
7134 | within X, and return the amount INCED is incremented or decremented by. | |
7135 | The value is always positive. */ | |
7136 | ||
7137 | static int | |
0c20a65f | 7138 | find_inc_amount (rtx x, rtx inced) |
eab89b90 | 7139 | { |
b3694847 SS |
7140 | enum rtx_code code = GET_CODE (x); |
7141 | const char *fmt; | |
7142 | int i; | |
eab89b90 RK |
7143 | |
7144 | if (code == MEM) | |
7145 | { | |
b3694847 | 7146 | rtx addr = XEXP (x, 0); |
eab89b90 RK |
7147 | if ((GET_CODE (addr) == PRE_DEC |
7148 | || GET_CODE (addr) == POST_DEC | |
7149 | || GET_CODE (addr) == PRE_INC | |
7150 | || GET_CODE (addr) == POST_INC) | |
7151 | && XEXP (addr, 0) == inced) | |
7152 | return GET_MODE_SIZE (GET_MODE (x)); | |
4b983fdc RH |
7153 | else if ((GET_CODE (addr) == PRE_MODIFY |
7154 | || GET_CODE (addr) == POST_MODIFY) | |
7155 | && GET_CODE (XEXP (addr, 1)) == PLUS | |
7156 | && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0) | |
7157 | && XEXP (addr, 0) == inced | |
481683e1 | 7158 | && CONST_INT_P (XEXP (XEXP (addr, 1), 1))) |
4381f7c2 KH |
7159 | { |
7160 | i = INTVAL (XEXP (XEXP (addr, 1), 1)); | |
7161 | return i < 0 ? -i : i; | |
7162 | } | |
7163 | } | |
eab89b90 RK |
7164 | |
7165 | fmt = GET_RTX_FORMAT (code); | |
7166 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
7167 | { | |
7168 | if (fmt[i] == 'e') | |
7169 | { | |
b3694847 | 7170 | int tem = find_inc_amount (XEXP (x, i), inced); |
eab89b90 RK |
7171 | if (tem != 0) |
7172 | return tem; | |
7173 | } | |
7174 | if (fmt[i] == 'E') | |
7175 | { | |
b3694847 | 7176 | int j; |
eab89b90 RK |
7177 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
7178 | { | |
b3694847 | 7179 | int tem = find_inc_amount (XVECEXP (x, i, j), inced); |
eab89b90 RK |
7180 | if (tem != 0) |
7181 | return tem; | |
7182 | } | |
7183 | } | |
7184 | } | |
7185 | ||
7186 | return 0; | |
7187 | } | |
7188 | \f | |
d0236c3b DN |
7189 | /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a |
7190 | REG_INC note in insn INSN. REGNO must refer to a hard register. */ | |
7191 | ||
7192 | #ifdef AUTO_INC_DEC | |
b8698a0f | 7193 | static int |
d0236c3b DN |
7194 | reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno, |
7195 | rtx insn) | |
7196 | { | |
7197 | rtx link; | |
7198 | ||
7199 | gcc_assert (insn); | |
7200 | ||
7201 | if (! INSN_P (insn)) | |
7202 | return 0; | |
b8698a0f | 7203 | |
d0236c3b DN |
7204 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
7205 | if (REG_NOTE_KIND (link) == REG_INC) | |
7206 | { | |
7207 | unsigned int test = (int) REGNO (XEXP (link, 0)); | |
7208 | if (test >= regno && test < endregno) | |
b8698a0f | 7209 | return 1; |
d0236c3b DN |
7210 | } |
7211 | return 0; | |
7212 | } | |
7213 | #else | |
7214 | ||
7215 | #define reg_inc_found_and_valid_p(regno,endregno,insn) 0 | |
7216 | ||
b8698a0f | 7217 | #endif |
d0236c3b | 7218 | |
9532e31f | 7219 | /* Return 1 if register REGNO is the subject of a clobber in insn INSN. |
d0236c3b DN |
7220 | If SETS is 1, also consider SETs. If SETS is 2, enable checking |
7221 | REG_INC. REGNO must refer to a hard register. */ | |
eab89b90 RK |
7222 | |
7223 | int | |
ef4bddc2 | 7224 | regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode, |
0c20a65f | 7225 | int sets) |
eab89b90 | 7226 | { |
5b804e8a RS |
7227 | unsigned int nregs, endregno; |
7228 | ||
7229 | /* regno must be a hard register. */ | |
7230 | gcc_assert (regno < FIRST_PSEUDO_REGISTER); | |
7231 | ||
7232 | nregs = hard_regno_nregs[regno][mode]; | |
7233 | endregno = regno + nregs; | |
8441bc30 | 7234 | |
9532e31f | 7235 | if ((GET_CODE (PATTERN (insn)) == CLOBBER |
d0236c3b | 7236 | || (sets == 1 && GET_CODE (PATTERN (insn)) == SET)) |
f8cfc6aa | 7237 | && REG_P (XEXP (PATTERN (insn), 0))) |
8441bc30 | 7238 | { |
ae0ed63a | 7239 | unsigned int test = REGNO (XEXP (PATTERN (insn), 0)); |
8441bc30 | 7240 | |
e695931e | 7241 | return test >= regno && test < endregno; |
8441bc30 | 7242 | } |
eab89b90 | 7243 | |
d0236c3b | 7244 | if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn)) |
b8698a0f L |
7245 | return 1; |
7246 | ||
eab89b90 RK |
7247 | if (GET_CODE (PATTERN (insn)) == PARALLEL) |
7248 | { | |
7249 | int i = XVECLEN (PATTERN (insn), 0) - 1; | |
7250 | ||
7251 | for (; i >= 0; i--) | |
7252 | { | |
7253 | rtx elt = XVECEXP (PATTERN (insn), 0, i); | |
9532e31f | 7254 | if ((GET_CODE (elt) == CLOBBER |
601b6520 | 7255 | || (sets == 1 && GET_CODE (elt) == SET)) |
f8cfc6aa | 7256 | && REG_P (XEXP (elt, 0))) |
8441bc30 | 7257 | { |
ae0ed63a | 7258 | unsigned int test = REGNO (XEXP (elt, 0)); |
a6a2274a | 7259 | |
e695931e | 7260 | if (test >= regno && test < endregno) |
8441bc30 BS |
7261 | return 1; |
7262 | } | |
d0236c3b DN |
7263 | if (sets == 2 |
7264 | && reg_inc_found_and_valid_p (regno, endregno, elt)) | |
b8698a0f | 7265 | return 1; |
eab89b90 RK |
7266 | } |
7267 | } | |
7268 | ||
7269 | return 0; | |
7270 | } | |
10bcde0d | 7271 | |
f12448c8 AO |
7272 | /* Find the low part, with mode MODE, of a hard regno RELOADREG. */ |
7273 | rtx | |
ef4bddc2 | 7274 | reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode) |
f12448c8 AO |
7275 | { |
7276 | int regno; | |
7277 | ||
7278 | if (GET_MODE (reloadreg) == mode) | |
7279 | return reloadreg; | |
7280 | ||
7281 | regno = REGNO (reloadreg); | |
7282 | ||
c0a6a1ef | 7283 | if (REG_WORDS_BIG_ENDIAN) |
66fd46b6 JH |
7284 | regno += (int) hard_regno_nregs[regno][GET_MODE (reloadreg)] |
7285 | - (int) hard_regno_nregs[regno][mode]; | |
f12448c8 AO |
7286 | |
7287 | return gen_rtx_REG (mode, regno); | |
7288 | } | |
7289 | ||
27c38fbe | 7290 | static const char *const reload_when_needed_name[] = |
10bcde0d | 7291 | { |
05d10675 BS |
7292 | "RELOAD_FOR_INPUT", |
7293 | "RELOAD_FOR_OUTPUT", | |
10bcde0d | 7294 | "RELOAD_FOR_INSN", |
47c8cf91 ILT |
7295 | "RELOAD_FOR_INPUT_ADDRESS", |
7296 | "RELOAD_FOR_INPADDR_ADDRESS", | |
10bcde0d | 7297 | "RELOAD_FOR_OUTPUT_ADDRESS", |
47c8cf91 | 7298 | "RELOAD_FOR_OUTADDR_ADDRESS", |
05d10675 | 7299 | "RELOAD_FOR_OPERAND_ADDRESS", |
10bcde0d | 7300 | "RELOAD_FOR_OPADDR_ADDR", |
05d10675 | 7301 | "RELOAD_OTHER", |
10bcde0d RK |
7302 | "RELOAD_FOR_OTHER_ADDRESS" |
7303 | }; | |
7304 | ||
b8fb2d72 | 7305 | /* These functions are used to print the variables set by 'find_reloads' */ |
10bcde0d | 7306 | |
24e47c76 | 7307 | DEBUG_FUNCTION void |
0c20a65f | 7308 | debug_reload_to_stream (FILE *f) |
10bcde0d RK |
7309 | { |
7310 | int r; | |
6f7d635c | 7311 | const char *prefix; |
10bcde0d | 7312 | |
b8fb2d72 CI |
7313 | if (! f) |
7314 | f = stderr; | |
10bcde0d RK |
7315 | for (r = 0; r < n_reloads; r++) |
7316 | { | |
b8fb2d72 | 7317 | fprintf (f, "Reload %d: ", r); |
10bcde0d | 7318 | |
eceef4c9 | 7319 | if (rld[r].in != 0) |
10bcde0d | 7320 | { |
b8fb2d72 | 7321 | fprintf (f, "reload_in (%s) = ", |
eceef4c9 BS |
7322 | GET_MODE_NAME (rld[r].inmode)); |
7323 | print_inline_rtx (f, rld[r].in, 24); | |
b8fb2d72 | 7324 | fprintf (f, "\n\t"); |
10bcde0d RK |
7325 | } |
7326 | ||
eceef4c9 | 7327 | if (rld[r].out != 0) |
10bcde0d | 7328 | { |
b8fb2d72 | 7329 | fprintf (f, "reload_out (%s) = ", |
eceef4c9 BS |
7330 | GET_MODE_NAME (rld[r].outmode)); |
7331 | print_inline_rtx (f, rld[r].out, 24); | |
b8fb2d72 | 7332 | fprintf (f, "\n\t"); |
10bcde0d RK |
7333 | } |
7334 | ||
48c54229 | 7335 | fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]); |
10bcde0d | 7336 | |
b8fb2d72 | 7337 | fprintf (f, "%s (opnum = %d)", |
eceef4c9 BS |
7338 | reload_when_needed_name[(int) rld[r].when_needed], |
7339 | rld[r].opnum); | |
10bcde0d | 7340 | |
eceef4c9 | 7341 | if (rld[r].optional) |
b8fb2d72 | 7342 | fprintf (f, ", optional"); |
10bcde0d | 7343 | |
eceef4c9 | 7344 | if (rld[r].nongroup) |
b2ee8ec2 | 7345 | fprintf (f, ", nongroup"); |
f5963e61 | 7346 | |
eceef4c9 BS |
7347 | if (rld[r].inc != 0) |
7348 | fprintf (f, ", inc by %d", rld[r].inc); | |
10bcde0d | 7349 | |
eceef4c9 | 7350 | if (rld[r].nocombine) |
b8fb2d72 | 7351 | fprintf (f, ", can't combine"); |
10bcde0d | 7352 | |
eceef4c9 | 7353 | if (rld[r].secondary_p) |
b8fb2d72 | 7354 | fprintf (f, ", secondary_reload_p"); |
10bcde0d | 7355 | |
eceef4c9 | 7356 | if (rld[r].in_reg != 0) |
10bcde0d | 7357 | { |
b8fb2d72 | 7358 | fprintf (f, "\n\treload_in_reg: "); |
eceef4c9 | 7359 | print_inline_rtx (f, rld[r].in_reg, 24); |
10bcde0d RK |
7360 | } |
7361 | ||
eceef4c9 | 7362 | if (rld[r].out_reg != 0) |
cb2afeb3 R |
7363 | { |
7364 | fprintf (f, "\n\treload_out_reg: "); | |
eceef4c9 | 7365 | print_inline_rtx (f, rld[r].out_reg, 24); |
cb2afeb3 R |
7366 | } |
7367 | ||
eceef4c9 | 7368 | if (rld[r].reg_rtx != 0) |
10bcde0d | 7369 | { |
b8fb2d72 | 7370 | fprintf (f, "\n\treload_reg_rtx: "); |
eceef4c9 | 7371 | print_inline_rtx (f, rld[r].reg_rtx, 24); |
10bcde0d RK |
7372 | } |
7373 | ||
505923a0 | 7374 | prefix = "\n\t"; |
eceef4c9 | 7375 | if (rld[r].secondary_in_reload != -1) |
10bcde0d | 7376 | { |
b8fb2d72 | 7377 | fprintf (f, "%ssecondary_in_reload = %d", |
eceef4c9 | 7378 | prefix, rld[r].secondary_in_reload); |
505923a0 | 7379 | prefix = ", "; |
10bcde0d RK |
7380 | } |
7381 | ||
eceef4c9 | 7382 | if (rld[r].secondary_out_reload != -1) |
b8fb2d72 | 7383 | fprintf (f, "%ssecondary_out_reload = %d\n", |
eceef4c9 | 7384 | prefix, rld[r].secondary_out_reload); |
10bcde0d | 7385 | |
505923a0 | 7386 | prefix = "\n\t"; |
eceef4c9 | 7387 | if (rld[r].secondary_in_icode != CODE_FOR_nothing) |
10bcde0d | 7388 | { |
b2ee8ec2 | 7389 | fprintf (f, "%ssecondary_in_icode = %s", prefix, |
eceef4c9 | 7390 | insn_data[rld[r].secondary_in_icode].name); |
505923a0 | 7391 | prefix = ", "; |
10bcde0d RK |
7392 | } |
7393 | ||
eceef4c9 | 7394 | if (rld[r].secondary_out_icode != CODE_FOR_nothing) |
b2ee8ec2 | 7395 | fprintf (f, "%ssecondary_out_icode = %s", prefix, |
eceef4c9 | 7396 | insn_data[rld[r].secondary_out_icode].name); |
10bcde0d | 7397 | |
b8fb2d72 | 7398 | fprintf (f, "\n"); |
10bcde0d | 7399 | } |
10bcde0d | 7400 | } |
b8fb2d72 | 7401 | |
24e47c76 | 7402 | DEBUG_FUNCTION void |
0c20a65f | 7403 | debug_reload (void) |
b8fb2d72 CI |
7404 | { |
7405 | debug_reload_to_stream (stderr); | |
7406 | } |