/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }
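
  /* Worked example for the block above: with MODE = QImode (width 8)
     and C = 0x1ff, the mask leaves 0xff, XORing with the sign bit 0x80
     gives 0x7f, and subtracting 0x80 yields -1, i.e. the low byte
     sign-extended back to a full HOST_WIDE_INT.  */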

  return c;
}

/* Return an rtx for the sum of X and the integer C.  */
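/* For instance, plus_constant (GEN_INT (6), 4) folds to (const_int 10),
   while adding 4 to a plain REG yields (plus (reg) (const_int 4)).  */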

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
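/* For example, with X = (plus (reg) (const_int 4)) and *CONSTPTR =
   const0_rtx, the result is (reg) and *CONSTPTR becomes (const_int 4).  */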

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
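/* For instance, (plus (mem (reg A)) (symbol_ref B)) becomes
   (plus (reg T1) (reg T2)), where T1 and T2 are new pseudos loaded
   from the MEM and the SYMBOL_REF by freshly emitted insns.  */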

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS's pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */
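/* For instance, when the address mode is wider than the pointer mode and
   POINTERS_EXTEND_UNSIGNED is 1, a SYMBOL_REF is simply given the wider
   mode, while a REG falls through to convert_modes and is zero-extended.  */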

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */
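/* For example, a bare constant address such as a SYMBOL_REF is typically
   copied into a fresh pseudo first (so that it can be CSEd), and that
   register is returned as the legitimate address.  */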

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */
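/* For example, a MEM addressed by (symbol_ref X), where X lives in an
   object block, may be rewritten to use (plus ANCHOR (const_int D)),
   with ANCHOR the section anchor chosen for the block and D the offset
   of X from that anchor.  */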

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */
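/* For example, force_reg (Pmode, GEN_INT (64)) emits
   (set (reg NEW) (const_int 64)) for a fresh pseudo NEW and returns
   NEW.  */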

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */
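/* For instance, on a target whose PROMOTE_MODE widens sub-word integers,
   promote_mode (integer_type, QImode, &unsignedp) would return the wider
   mode PROMOTE_MODE selects; without PROMOTE_MODE the mode is returned
   unchanged.  */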

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */
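/* E.g. on a STACK_GROWS_DOWNWARD target, adjust_stack (GEN_INT (16))
   emits SP := SP + 16, releasing 16 bytes, and decreases
   stack_pointer_delta by 16.  */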

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
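/* E.g. with a 16-byte preferred boundary, a constant SIZE of 13 is
   rounded to (const_int 16); a non-constant SIZE is rounded at run time
   by the add/divide/multiply sequence below.  */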

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */
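/* A sketch of typical use: when expanding __builtin_alloca (N), the
   expanded N is passed as SIZE, SIZE_ALIGN is 0, and REQUIRED_ALIGN is
   the biggest alignment the target supports.  */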
18ca7dab RK |
1145 | |
1146 | rtx | |
3a42502d RH |
1147 | allocate_dynamic_stack_space (rtx size, unsigned size_align, |
1148 | unsigned required_align, bool cannot_accumulate) | |
18ca7dab | 1149 | { |
d3c12306 | 1150 | HOST_WIDE_INT stack_usage_size = -1; |
3a42502d RH |
1151 | rtx final_label, final_target, target; |
1152 | bool must_align; | |
d3c12306 | 1153 | |
15fc0026 | 1154 | /* If we're asking for zero bytes, it doesn't matter what we point |
9faa82d8 | 1155 | to since we can't dereference it. But return a reasonable |
15fc0026 RK |
1156 | address anyway. */ |
1157 | if (size == const0_rtx) | |
1158 | return virtual_stack_dynamic_rtx; | |
1159 | ||
1160 | /* Otherwise, show we're calling alloca or equivalent. */ | |
e3b5732b | 1161 | cfun->calls_alloca = 1; |
15fc0026 | 1162 | |
d3c12306 EB |
1163 | /* If stack usage info is requested, look into the size we are passed. |
1164 | We need to do so this early to avoid the obfuscation that may be | |
1165 | introduced later by the various alignment operations. */ | |
1166 | if (flag_stack_usage) | |
1167 | { | |
32990d5b | 1168 | if (CONST_INT_P (size)) |
d3c12306 | 1169 | stack_usage_size = INTVAL (size); |
32990d5b | 1170 | else if (REG_P (size)) |
d3c12306 EB |
1171 | { |
1172 | /* Look into the last emitted insn and see if we can deduce | |
1173 | something for the register. */ | |
1174 | rtx insn, set, note; | |
1175 | insn = get_last_insn (); | |
1176 | if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size)) | |
1177 | { | |
32990d5b | 1178 | if (CONST_INT_P (SET_SRC (set))) |
d3c12306 EB |
1179 | stack_usage_size = INTVAL (SET_SRC (set)); |
1180 | else if ((note = find_reg_equal_equiv_note (insn)) | |
32990d5b | 1181 | && CONST_INT_P (XEXP (note, 0))) |
d3c12306 EB |
1182 | stack_usage_size = INTVAL (XEXP (note, 0)); |
1183 | } | |
1184 | } | |
1185 | ||
1186 | /* If the size is not constant, we can't say anything. */ | |
1187 | if (stack_usage_size == -1) | |
1188 | { | |
1189 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1190 | stack_usage_size = 0; | |
1191 | } | |
1192 | } | |
1193 | ||
18ca7dab RK |
1194 | /* Ensure the size is in the proper mode. */ |
1195 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1196 | size = convert_to_mode (Pmode, size, 1); | |
1197 | ||
3a42502d RH |
1198 | /* Adjust SIZE_ALIGN, if needed. */ |
1199 | if (CONST_INT_P (size)) | |
1200 | { | |
1201 | unsigned HOST_WIDE_INT lsb; | |
1202 | ||
1203 | lsb = INTVAL (size); | |
1204 | lsb &= -lsb; | |
1205 | ||
1206 | /* Watch out for overflow truncating to "unsigned". */ | |
1207 | if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1208 | size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1209 | else | |
1210 | size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1211 | } | |
1212 | else if (size_align < BITS_PER_UNIT) | |
1213 | size_align = BITS_PER_UNIT; | |
1214 | ||
c2f8b491 JH |
1215 | /* We can't attempt to minimize alignment necessary, because we don't |
1216 | know the final value of preferred_stack_boundary yet while executing | |
1217 | this code. */ | |
32990d5b JJ |
1218 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) |
1219 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
c2f8b491 | 1220 | |
18ca7dab | 1221 | /* We will need to ensure that the address we return is aligned to |
3a42502d | 1222 | REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't |
d9b3eb63 | 1223 | always know its final value at this point in the compilation (it |
18ca7dab RK |
1224 | might depend on the size of the outgoing parameter lists, for |
1225 | example), so we must align the value to be returned in that case. | |
cc2902df | 1226 | (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if |
18ca7dab RK |
1227 | STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined). |
1228 | We must also do an alignment operation on the returned value if | |
3a42502d | 1229 | the stack pointer alignment is less strict than REQUIRED_ALIGN. |
18ca7dab RK |
1230 | |
1231 | If we have to align, we must leave space in SIZE for the hole | |
1232 | that might result from the alignment operation. */ | |
1233 | ||
3a42502d | 1234 | must_align = (crtl->preferred_stack_boundary < required_align); |
31cdd499 | 1235 | #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) |
3a42502d | 1236 | must_align = true; |
18ca7dab RK |
1237 | #endif |
1238 | ||
3a42502d | 1239 | if (must_align) |
d3c12306 | 1240 | { |
3a42502d RH |
1241 | unsigned extra, extra_align; |
1242 | ||
1243 | if (required_align > PREFERRED_STACK_BOUNDARY) | |
1244 | extra_align = PREFERRED_STACK_BOUNDARY; | |
1245 | else if (required_align > STACK_BOUNDARY) | |
1246 | extra_align = STACK_BOUNDARY; | |
1247 | else | |
1248 | extra_align = BITS_PER_UNIT; | |
1249 | extra = (required_align - extra_align) / BITS_PER_UNIT; | |
1250 | ||
1251 | size = plus_constant (size, extra); | |
1252 | size = force_operand (size, NULL_RTX); | |
d3c12306 EB |
1253 | |
1254 | if (flag_stack_usage) | |
3a42502d | 1255 | stack_usage_size += extra; |
d3c12306 | 1256 | |
3a42502d RH |
1257 | if (extra && size_align > extra_align) |
1258 | size_align = extra_align; | |
d3c12306 | 1259 | } |
1d9d04f8 | 1260 | |
18ca7dab RK |
1261 | #ifdef SETJMP_VIA_SAVE_AREA |
1262 | /* If setjmp restores regs from a save area in the stack frame, | |
1263 | avoid clobbering the reg save area. Note that the offset of | |
1264 | virtual_incoming_args_rtx includes the preallocated stack args space. | |
1265 | It would be no problem to clobber that, but it's on the wrong side | |
d0828b31 DM |
1266 | of the old save area. |
1267 | ||
1268 | What used to happen is that, since we did not know for sure | |
1269 | whether setjmp() was invoked until after RTL generation, we | |
1270 | would use reg notes to store the "optimized" size and fix things | |
1271 | up later. These days we know this information before we ever | |
1272 | start building RTL so the reg notes are unnecessary. */ | |
d3c12306 | 1273 | if (cfun->calls_setjmp) |
d0828b31 DM |
1274 | { |
1275 | rtx dynamic_offset | |
1276 | = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx, | |
1277 | stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1278 | ||
1279 | size = expand_binop (Pmode, add_optab, size, dynamic_offset, | |
1280 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
d3c12306 EB |
1281 | |
1282 | /* The above dynamic offset cannot be computed statically at this | |
1283 | point, but it will be possible to do so after RTL expansion is | |
1284 | done. Record how many times we will need to add it. */ | |
1285 | if (flag_stack_usage) | |
1286 | current_function_dynamic_alloc_count++; | |
1287 | ||
3a42502d RH |
1288 | /* ??? Can we infer a minimum of STACK_BOUNDARY here? */ |
1289 | size_align = BITS_PER_UNIT; | |
d0828b31 | 1290 | } |
18ca7dab RK |
1291 | #endif /* SETJMP_VIA_SAVE_AREA */ |
1292 | ||
1293 | /* Round the size to a multiple of the required stack alignment. | |
1294 | Since the stack if presumed to be rounded before this allocation, | |
1295 | this will maintain the required alignment. | |
1296 | ||
1297 | If the stack grows downward, we could save an insn by subtracting | |
1298 | SIZE from the stack pointer and then aligning the stack pointer. | |
1299 | The problem with this is that the stack pointer may be unaligned | |
1300 | between the execution of the subtraction and alignment insns and | |
1301 | some machines do not allow this. Even on those that do, some | |
1302 | signal handlers malfunction if a signal should occur between those | |
1303 | insns. Since this is an extremely rare event, we have no reliable | |
1304 | way of knowing which systems have this problem. So we avoid even | |
1305 | momentarily mis-aligning the stack. */ | |
3a42502d | 1306 | if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) |
d3c12306 EB |
1307 | { |
1308 | size = round_push (size); | |
18ca7dab | 1309 | |
d3c12306 EB |
1310 | if (flag_stack_usage) |
1311 | { | |
32990d5b | 1312 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
d3c12306 EB |
1313 | stack_usage_size = (stack_usage_size + align - 1) / align * align; |
1314 | } | |
1315 | } | |
1316 | ||
3a42502d | 1317 | target = gen_reg_rtx (Pmode); |
7458026b | 1318 | |
d3c12306 EB |
1319 | /* The size is supposed to be fully adjusted at this point so record it |
1320 | if stack usage info is requested. */ | |
1321 | if (flag_stack_usage) | |
1322 | { | |
1323 | current_function_dynamic_stack_size += stack_usage_size; | |
1324 | ||
1325 | /* ??? This is gross but the only safe stance in the absence | |
1326 | of stack usage oriented flow analysis. */ | |
1327 | if (!cannot_accumulate) | |
1328 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1329 | } | |
18ca7dab | 1330 | |
7458026b ILT |
1331 | final_label = NULL_RTX; |
1332 | final_target = NULL_RTX; | |
1333 | ||
1334 | /* If we are splitting the stack, we need to ask the backend whether | |
1335 | there is enough room on the current stack. If there isn't, or if | |
1336 | the backend doesn't know how to tell is, then we need to call a | |
1337 | function to allocate memory in some other way. This memory will | |
1338 | be released when we release the current stack segment. The | |
1339 | effect is that stack allocation becomes less efficient, but at | |
1340 | least it doesn't cause a stack overflow. */ | |
1341 | if (flag_split_stack) | |
1342 | { | |
c3928dde | 1343 | rtx available_label, ask, space, func; |
7458026b ILT |
1344 | |
1345 | available_label = NULL_RTX; | |
1346 | ||
1347 | #ifdef HAVE_split_stack_space_check | |
1348 | if (HAVE_split_stack_space_check) | |
1349 | { | |
1350 | available_label = gen_label_rtx (); | |
1351 | ||
1352 | /* This instruction will branch to AVAILABLE_LABEL if there | |
1353 | are SIZE bytes available on the stack. */ | |
1354 | emit_insn (gen_split_stack_space_check (size, available_label)); | |
1355 | } | |
1356 | #endif | |
1357 | ||
c3928dde ILT |
1358 | /* The __morestack_allocate_stack_space function will allocate |
1359 | memory using malloc. We don't know that the alignment of the | |
1360 | memory returned by malloc will meet REQUIRED_ALIGN. Increase | |
1361 | SIZE to make sure we allocate enough space. */ | |
1362 | ask = expand_binop (Pmode, add_optab, size, | |
1363 | GEN_INT (required_align / BITS_PER_UNIT - 1), | |
1364 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1365 | must_align = true; | |
1366 | ||
7458026b ILT |
1367 | func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1368 | ||
1369 | space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | |
c3928dde | 1370 | 1, ask, Pmode); |
7458026b ILT |
1371 | |
1372 | if (available_label == NULL_RTX) | |
1373 | return space; | |
1374 | ||
1375 | final_target = gen_reg_rtx (Pmode); | |
7458026b ILT |
1376 | |
1377 | emit_move_insn (final_target, space); | |
1378 | ||
1379 | final_label = gen_label_rtx (); | |
1380 | emit_jump (final_label); | |
1381 | ||
1382 | emit_label (available_label); | |
1383 | } | |
1384 | ||
18ca7dab RK |
1385 | do_pending_stack_adjust (); |
1386 | ||
1503a7ec | 1387 | /* We ought to be called always on the toplevel and stack ought to be aligned |
a1f300c0 | 1388 | properly. */ |
5b0264cb NS |
1389 | gcc_assert (!(stack_pointer_delta |
1390 | % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | |
1503a7ec | 1391 | |
d809253a EB |
1392 | /* If needed, check that we have the required amount of stack. Take into |
1393 | account what has already been checked. */ | |
1394 | if (STACK_CHECK_MOVING_SP) | |
1395 | ; | |
1396 | else if (flag_stack_check == GENERIC_STACK_CHECK) | |
b38f3813 EB |
1397 | probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, |
1398 | size); | |
1399 | else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) | |
1400 | probe_stack_range (STACK_CHECK_PROTECT, size); | |

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  For operand 1, convert to the
	 proper mode and validate.  */
      if (mode == VOIDmode)
	mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
	size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
	  available = expand_binop (Pmode, sub_optab,
				    stack_pointer_rtx, stack_limit_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#else
	  available = expand_binop (Pmode, sub_optab,
				    stack_limit_rtx, stack_pointer_rtx,
				    NULL_RTX, 1, OPTAB_WIDEN);
#endif
	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}
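      /* Illustration only: on a downward-growing stack, the bounds check
	 emitted above amounts to the following unsigned comparison, with
	 trap () standing in for the trap insn (or a compile-time error
	 if the target has none).  */
#if 0
      if ((uintptr_t) sp - (uintptr_t) stack_limit >= size)
	goto space_available;
      trap ();
    space_available:;
#endif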

      saved_stack_pointer_delta = stack_pointer_delta;
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else
	anti_adjust_stack (size);
      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (required_align / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (required_align / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (required_align / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }
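  /* Illustration only: the add/divide/multiply trio above is the usual
     round-up-to-alignment idiom.  A worked instance, assuming a 16-byte
     (128-bit) REQUIRED_ALIGN:  */
#if 0
  /* target = 0x1003, align = 16 bytes:
     0x1003 + 15 = 0x1012;  0x1012 / 16 = 0x101;  0x101 * 16 = 0x1010,
     the first 16-byte boundary at or above 0x1003.  */
  target = (target + align - 1) / align * align;
#endif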

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}
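
/* Illustration only: absent a probe_stack pattern, the fallback above
   emits a volatile word-sized zero store, i.e. the run-time effect is
   roughly the sketch below.  Marking the MEM volatile keeps later
   passes from deleting the apparently dead store.  */
#if 0
*(volatile long *) address = 0;	/* touch the page to trigger any fault */
#endif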

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
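
/* Illustration only: on a downward-growing stack the macros above make
   "SP STACK_GROW_OP offset" read as SP minus offset.  For example, with
   a 4 KB probe interval:  */
#if 0
addr = gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, stack_pointer_rtx, off);
					/* => (minus:P sp off)  */
i = STACK_GROW_OFF (4096);		/* => -4096  */
#endif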

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (size, first)));
      insn_operand_predicate_fn pred
	= insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && !((*pred) (addr, Pmode)))
	addr = copy_to_mode_reg (Pmode, addr);

      emit_insn (gen_check_stack (addr));
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  addr = memory_address (Pmode,
				 plus_constant (stack_pointer_rtx,
						STACK_GROW_OFF (first + i)));
	  emit_stack_probe (addr);
	}

      addr = memory_address (Pmode,
			     plus_constant (stack_pointer_rtx,
					    STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }
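  /* Illustration only: a worked instance of the constant case, assuming
     a 4 KB PROBE_INTERVAL, first == 16 and size == 12288.  The loop
     probes at offsets 16 + 4096 and 16 + 8192, and the trailing probe
     hits 16 + 12288, i.e. FIRST + SIZE exactly.  On a downward-growing
     stack:  */
#if 0
  probe (sp - (16 + 4096));
  probe (sp - (16 + 8192));
  probe (sp - (16 + 12288));
#endif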

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 test_addr,
						 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

	   while (TEST_ADDR != LAST_ADDR)
	     {
	       TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	       probe at TEST_ADDR
	     }

	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
			       end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
			   GEN_INT (PROBE_INTERVAL), test_addr,
			   1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  rtx addr;

	  if (CONST_INT_P (temp))
	    {
	      /* Use [base + disp] addressing mode if supported.  */
	      HOST_WIDE_INT offset = INTVAL (temp);
	      addr = memory_address (Pmode,
				     plus_constant (last_addr,
						    STACK_GROW_OFF (offset)));
	    }
	  else
	    {
	      /* Manual CSE if the difference is not known at compile-time.  */
	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	      addr = memory_address (Pmode,
				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						     last_addr, temp));
	    }

	  emit_stack_probe (addr);
	}
    }
}
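
/* Illustration only: a C equivalent of the variable-size probe loop
   above, for a downward-growing stack, with probe () standing in for
   emit_stack_probe.  Note the equality loop test, which stays correct
   even if the addresses wrap around the ends of the address space.  */
#if 0
char *test_addr = sp - first;
char *last_addr = test_addr - (size & -PROBE_INTERVAL);
while (test_addr != last_addr)
  {
    test_addr -= PROBE_INTERVAL;
    probe (test_addr);
  }
if (size % PROBE_INTERVAL != 0)
  probe (last_addr - size % PROBE_INTERVAL);	/* probe FIRST + SIZE */
#endif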

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it exceeds SIZE.  If only one probe is
	 needed, this will not generate any code.  Then adjust and probe
	 to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  if (first_probe)
	    {
	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
	      first_probe = false;
	    }
	  else
	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
	  emit_stack_probe (stack_pointer_rtx);
	}

      if (first_probe)
	anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
	anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
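  /* Illustration only: a worked instance of the constant case, assuming
     PROBE_INTERVAL == 4096, dope == 32 and size == 12288.  The emitted
     sequence adjusts SP by 2*4096 + 32 and probes, adjusts by 4096 and
     probes, then adjusts by 12288 + 4096 - 12288 = 4096 and probes once
     more.  Total adjustment: 8224 + 4096 + 4096 = 16416
     = size + PROBE_INTERVAL + dope, as expected.  */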

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

	   while (SP != LAST_ADDR)
	     {
	       SP = SP + PROBE_INTERVAL
	       probe at SP
	     }

	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			       Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  /* Manual CSE if the difference is not known at compile-time.  */
	  if (GET_CODE (temp) != CONST_INT)
	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	  anti_adjust_stack (temp);
	  emit_stack_probe (stack_pointer_rtx);
	}
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
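
/* Illustration only: whichever branch runs above, SP has been moved by
   SIZE + PROBE_INTERVAL + dope before the final adjustment, so the net
   effect is (sketch, for a downward-growing stack):  */
#if 0
if (adjust_back)
  sp += size + PROBE_INTERVAL + dope;	/* net SP change: 0 (probe only) */
else
  sp += PROBE_INTERVAL + dope;		/* net SP change: -size (push) */
#endif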

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0; FNTYPE is its FUNCTION_TYPE node.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
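
/* Illustration only: the loop above picks the narrowest integer mode at
   least BYTES wide.  A self-contained sketch with hypothetical mode
   sizes (the helper name is ours, not GCC's):  */
#if 0
static const unsigned mode_size[] = { 1, 2, 4, 8, 16 };	/* QI..TI */

static int
example_pick_mode (unsigned bytes)	/* bytes == 6 selects index 3 (DI) */
{
  unsigned i;
  for (i = 0; i < sizeof mode_size / sizeof mode_size[0]; i++)
    if (mode_size[i] >= bytes)
      return i;
  return -1;	/* no mode wide enough; the real code asserts instead */
}
#endif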

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"