/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

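/* Worked example (editor's sketch, not part of the original source):
   trunc_int_for_mode (0x1F0, QImode), with QImode eight bits wide,
   computes sign = 0x80 and then
     c &= 0xFF   ->  0xF0
     c ^= 0x80   ->  0x70
     c -= 0x80   -> -0x10
   i.e. it returns -16, the low eight bits of 0x1F0 reinterpreted as a
   signed 8-bit value.  */
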
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  */

rtx
plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        {
          double_int di_x = double_int::from_shwi (INTVAL (x));
          double_int di_c = double_int::from_shwi (c);

          bool overflow;
          double_int v = di_x.add_with_sign (di_c, false, &overflow);
          if (overflow)
            gcc_unreachable ();

          return immed_double_int_const (v, mode);
        }

      return gen_int_mode (UINTVAL (x) + c, mode);

    case CONST_DOUBLE:
      {
        double_int di_x = double_int::from_pair (CONST_DOUBLE_HIGH (x),
                                                 CONST_DOUBLE_LOW (x));
        double_int di_c = double_int::from_shwi (c);

        bool overflow;
        double_int v = di_x.add_with_sign (di_c, false, &overflow);
        if (overflow)
          /* Sorry, we have no way to represent overflows this wide.
             To fix, add constant support wider than CONST_DOUBLE.  */
          gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);

        return immed_double_int_const (v, mode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0),
                            plus_constant (mode, XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (mode, *const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
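
/* Usage sketch (editor's note, not part of the original source):

     plus_constant (Pmode, stack_pointer_rtx, 8)
       ==> (plus:P (reg sp) (const_int 8))
     plus_constant (Pmode, gen_rtx_SYMBOL_REF (Pmode, "sym"), 8)
       ==> (const:P (plus:P (symbol_ref "sym") (const_int 8)))

   The second result is wrapped in a CONST because ALL_CONSTANT is set
   for SYMBOL_REF and LABEL_REF operands above.  */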
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
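
/* Worked example (editor's sketch, not part of the original source):
   with X = (plus (plus (reg R) (const_int 4)) (const_int -1)) and
   *CONSTPTR = (const_int 0), the function returns (reg R) and leaves
   *CONSTPTR = (const_int 3); each recursion folds one constant term
   into *CONSTPTR via simplify_binary_operation.  */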

/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
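
/* Usage sketch (editor's note, not part of the original source): for a
   declaration such as "int a[10]" with 4-byte ints, int_expr_size (exp)
   returns 40, whereas for a variable-length array it returns -1 because
   the size tree does not satisfy tree_fits_shwi_p.  */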
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

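/* Worked example (editor's sketch, not part of the original source):
   for X = (plus (mem (reg A)) (symbol_ref "s")), the recursion forces
   both the MEM and the SYMBOL_REF into fresh pseudos R1 and R2 and
   returns (plus (reg R1) (reg R2)), leaving X itself unmodified.  */
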
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
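
/* Usage sketch (editor's note, not part of the original source; the
   modes are illustrative): on a target whose pointer mode is SImode
   but whose address mode is DImode with POINTERS_EXTEND_UNSIGNED == 1,
   converting a (reg:SI R) to DImode falls through to convert_modes and
   zero-extends it, while a (symbol_ref:SI "s") is simply shallow-copied
   with its mode changed to DImode.  */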
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

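/* Worked example (editor's sketch, not part of the original source):
   given X = (plus (plus (reg B) (reg I)) (const_int 4096)) on a target
   that accepts base+index and base+offset addresses but not all three
   at once, eliminate_constant_term strips the 4096, the base+index sum
   is copied into a fresh register, and the function returns
   (plus (reg NEW) (const_int 4096)), assuming that form is valid.  */
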
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  enum machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
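
/* Worked example (editor's sketch, not part of the original source):
   if symbol "b" sits at offset 24 of its object block and the nearest
   section anchor covers offset 16, (mem (symbol_ref "b")) is rewritten
   as (mem (plus (reg ANCHOR) (const_int 8))); the anchor is loaded once
   with force_reg so accesses to neighbouring block members can share
   the register.  */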
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

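/* Worked example (editor's sketch, not part of the original source):
   for X = (const (plus (symbol_ref "v") (const_int 2))) where "v" has
   DECL_ALIGN of 256 bits, the block above computes sa = 256 and
   ca = ctz_hwi (2) * BITS_PER_UNIT = 8, so the returned pseudo is
   marked as a pointer with MIN (sa, ca) = 8 bits of known alignment.  */
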
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

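/* Usage sketch (editor's note, not part of the original source): on a
   target whose PROMOTE_MODE widens sub-word integers, promote_mode for
   a "short" (HImode) local typically returns SImode and updates
   *punsignedp, while promote_decl_mode of a PARM_DECL or RESULT_DECL
   defers to the target's promote_function_mode hook instead.  */
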
\f
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

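/* Usage sketch (editor's note, not part of the original source):
   anti_adjust_stack (GEN_INT (16)) emits "sp = sp - 16" on a
   STACK_GROWS_DOWNWARD target, bumps stack_pointer_delta by 16, and
   attaches a REG_ARGS_SIZE note unless suppress_reg_args_size is
   set.  */
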
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
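
/* Worked example (editor's sketch, not part of the original source):
   with a fixed 16-byte preferred boundary, round_push (GEN_INT (20))
   folds to GEN_INT (32) at compile time; for a variable SIZE it emits
   the equivalent of ((size + 15) / 16) * 16.  */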
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
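
/* Usage sketch (editor's note, not part of the original source): the
   usual pairing is

     rtx save = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save);
     ... emit code that allocates dynamic stack space ...
     emit_stack_restore (SAVE_BLOCK, save);

   with the save area allocated on first use by emit_stack_save.  */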
59257ff7 | 1155 | \f |
18ca7dab | 1156 | /* Return an rtx representing the address of an area of memory dynamically |
3a42502d | 1157 | pushed on the stack. |
18ca7dab RK |
1158 | |
1159 | Any required stack pointer alignment is preserved. | |
1160 | ||
1161 | SIZE is an rtx representing the size of the area. | |
091ad0b9 | 1162 | |
3a42502d RH |
1163 | SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This |
1164 | parameter may be zero. If so, a proper value will be extracted | |
1165 | from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed. | |
1166 | ||
1167 | REQUIRED_ALIGN is the alignment (in bits) required for the region | |
1168 | of memory. | |
d3c12306 EB |
1169 | |
1170 | If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the | |
1171 | stack space allocated by the generated code cannot be added with itself | |
1172 | in the course of the execution of the function. It is always safe to | |
1173 | pass FALSE here and the following criterion is sufficient in order to | |
1174 | pass TRUE: every path in the CFG that starts at the allocation point and | |
1175 | loops to it executes the associated deallocation code. */ | |
18ca7dab RK |
1176 | |
1177 | rtx | |
3a42502d RH |
1178 | allocate_dynamic_stack_space (rtx size, unsigned size_align, |
1179 | unsigned required_align, bool cannot_accumulate) | |
18ca7dab | 1180 | { |
d3c12306 | 1181 | HOST_WIDE_INT stack_usage_size = -1; |
3a42502d | 1182 | rtx final_label, final_target, target; |
34831f3e | 1183 | unsigned extra_align = 0; |
3a42502d | 1184 | bool must_align; |
d3c12306 | 1185 | |
15fc0026 | 1186 | /* If we're asking for zero bytes, it doesn't matter what we point |
9faa82d8 | 1187 | to since we can't dereference it. But return a reasonable |
15fc0026 RK |
1188 | address anyway. */ |
1189 | if (size == const0_rtx) | |
1190 | return virtual_stack_dynamic_rtx; | |
1191 | ||
1192 | /* Otherwise, show we're calling alloca or equivalent. */ | |
e3b5732b | 1193 | cfun->calls_alloca = 1; |
15fc0026 | 1194 | |
d3c12306 EB |
1195 | /* If stack usage info is requested, look into the size we are passed. |
1196 | We need to do so this early to avoid the obfuscation that may be | |
1197 | introduced later by the various alignment operations. */ | |
a11e0df4 | 1198 | if (flag_stack_usage_info) |
d3c12306 | 1199 | { |
32990d5b | 1200 | if (CONST_INT_P (size)) |
d3c12306 | 1201 | stack_usage_size = INTVAL (size); |
32990d5b | 1202 | else if (REG_P (size)) |
d3c12306 EB |
1203 | { |
1204 | /* Look into the last emitted insn and see if we can deduce | |
1205 | something for the register. */ | |
1206 | rtx insn, set, note; | |
1207 | insn = get_last_insn (); | |
1208 | if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size)) | |
1209 | { | |
32990d5b | 1210 | if (CONST_INT_P (SET_SRC (set))) |
d3c12306 EB |
1211 | stack_usage_size = INTVAL (SET_SRC (set)); |
1212 | else if ((note = find_reg_equal_equiv_note (insn)) | |
32990d5b | 1213 | && CONST_INT_P (XEXP (note, 0))) |
d3c12306 EB |
1214 | stack_usage_size = INTVAL (XEXP (note, 0)); |
1215 | } | |
1216 | } | |
1217 | ||
1218 | /* If the size is not constant, we can't say anything. */ | |
1219 | if (stack_usage_size == -1) | |
1220 | { | |
1221 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1222 | stack_usage_size = 0; | |
1223 | } | |
1224 | } | |
1225 | ||
18ca7dab RK |
1226 | /* Ensure the size is in the proper mode. */ |
1227 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1228 | size = convert_to_mode (Pmode, size, 1); | |
1229 | ||
3a42502d RH |
1230 | /* Adjust SIZE_ALIGN, if needed. */ |
1231 | if (CONST_INT_P (size)) | |
1232 | { | |
1233 | unsigned HOST_WIDE_INT lsb; | |
1234 | ||
1235 | lsb = INTVAL (size); | |
1236 | lsb &= -lsb; | |
1237 | ||
1238 | /* Watch out for overflow truncating to "unsigned". */ | |
1239 | if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1240 | size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1241 | else | |
1242 | size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1243 | } | |
1244 | else if (size_align < BITS_PER_UNIT) | |
1245 | size_align = BITS_PER_UNIT; | |
1246 | ||
34831f3e RH |
1247 | /* We can't attempt to minimize alignment necessary, because we don't |
1248 | know the final value of preferred_stack_boundary yet while executing | |
1249 | this code. */ | |
1250 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1251 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1252 | ||
18ca7dab | 1253 | /* We will need to ensure that the address we return is aligned to |
34831f3e RH |
1254 | REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't |
1255 | always know its final value at this point in the compilation (it | |
1256 | might depend on the size of the outgoing parameter lists, for | |
1257 | example), so we must align the value to be returned in that case. | |
1258 | (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if | |
1259 | STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined). | |
1260 | We must also do an alignment operation on the returned value if | |
1261 | the stack pointer alignment is less strict than REQUIRED_ALIGN. | |
1262 | ||
1263 | If we have to align, we must leave space in SIZE for the hole | |
1264 | that might result from the alignment operation. */ | |
1265 | ||
1266 | must_align = (crtl->preferred_stack_boundary < required_align); | |
1267 | if (must_align) | |
d3c12306 | 1268 | { |
34831f3e RH |
1269 | if (required_align > PREFERRED_STACK_BOUNDARY) |
1270 | extra_align = PREFERRED_STACK_BOUNDARY; | |
1271 | else if (required_align > STACK_BOUNDARY) | |
1272 | extra_align = STACK_BOUNDARY; | |
1273 | else | |
1274 | extra_align = BITS_PER_UNIT; | |
1ecad98e EB |
1275 | } |
1276 | ||
34831f3e RH |
1277 | /* ??? STACK_POINTER_OFFSET is always defined now. */ |
1278 | #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) | |
1279 | must_align = true; | |
1280 | extra_align = BITS_PER_UNIT; | |
1281 | #endif | |
1ecad98e | 1282 | |
34831f3e RH |
1283 | if (must_align) |
1284 | { | |
1285 | unsigned extra = (required_align - extra_align) / BITS_PER_UNIT; | |
3a42502d | 1286 | |
0a81f074 | 1287 | size = plus_constant (Pmode, size, extra); |
3a42502d | 1288 | size = force_operand (size, NULL_RTX); |
d3c12306 | 1289 | |
a11e0df4 | 1290 | if (flag_stack_usage_info) |
3a42502d | 1291 | stack_usage_size += extra; |
34831f3e | 1292 | |
3a42502d RH |
1293 | if (extra && size_align > extra_align) |
1294 | size_align = extra_align; | |
d3c12306 | 1295 | } |
1d9d04f8 | 1296 | |
18ca7dab | 1297 | /* Round the size to a multiple of the required stack alignment. |
34831f3e | 1298 | Since the stack if presumed to be rounded before this allocation, |
18ca7dab RK |
1299 | this will maintain the required alignment. |
1300 | ||
1301 | If the stack grows downward, we could save an insn by subtracting | |
1302 | SIZE from the stack pointer and then aligning the stack pointer. | |
1303 | The problem with this is that the stack pointer may be unaligned | |
1304 | between the execution of the subtraction and alignment insns and | |
1305 | some machines do not allow this. Even on those that do, some | |
1306 | signal handlers malfunction if a signal should occur between those | |
1307 | insns. Since this is an extremely rare event, we have no reliable | |
1308 | way of knowing which systems have this problem. So we avoid even | |
1309 | momentarily mis-aligning the stack. */ | |
3a42502d | 1310 | if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) |
d3c12306 EB |
1311 | { |
1312 | size = round_push (size); | |
18ca7dab | 1313 | |
a11e0df4 | 1314 | if (flag_stack_usage_info) |
d3c12306 | 1315 | { |
32990d5b | 1316 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
d3c12306 EB |
1317 | stack_usage_size = (stack_usage_size + align - 1) / align * align; |
1318 | } | |
1319 | } | |
1320 | ||
3a42502d | 1321 | target = gen_reg_rtx (Pmode); |
7458026b | 1322 | |
d3c12306 EB |
1323 | /* The size is supposed to be fully adjusted at this point so record it |
1324 | if stack usage info is requested. */ | |
a11e0df4 | 1325 | if (flag_stack_usage_info) |
d3c12306 EB |
1326 | { |
1327 | current_function_dynamic_stack_size += stack_usage_size; | |
1328 | ||
1329 | /* ??? This is gross but the only safe stance in the absence | |
1330 | of stack usage oriented flow analysis. */ | |
1331 | if (!cannot_accumulate) | |
1332 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1333 | } | |
18ca7dab | 1334 | |
7458026b ILT |
1335 | final_label = NULL_RTX; |
1336 | final_target = NULL_RTX; | |
1337 | ||
1338 | /* If we are splitting the stack, we need to ask the backend whether | |
1339 | there is enough room on the current stack. If there isn't, or if | |
1340 | the backend doesn't know how to tell is, then we need to call a | |
1341 | function to allocate memory in some other way. This memory will | |
1342 | be released when we release the current stack segment. The | |
1343 | effect is that stack allocation becomes less efficient, but at | |
1344 | least it doesn't cause a stack overflow. */ | |
1345 | if (flag_split_stack) | |
1346 | { | |
c3928dde | 1347 | rtx available_label, ask, space, func; |
7458026b ILT |
1348 | |
1349 | available_label = NULL_RTX; | |
1350 | ||
1351 | #ifdef HAVE_split_stack_space_check | |
1352 | if (HAVE_split_stack_space_check) | |
1353 | { | |
1354 | available_label = gen_label_rtx (); | |
1355 | ||
1356 | /* This instruction will branch to AVAILABLE_LABEL if there | |
1357 | are SIZE bytes available on the stack. */ | |
1358 | emit_insn (gen_split_stack_space_check (size, available_label)); | |
1359 | } | |
1360 | #endif | |
1361 | ||
c3928dde | 1362 | /* The __morestack_allocate_stack_space function will allocate |
c070a3b9 ILT |
1363 | memory using malloc. If the alignment of the memory returned |
1364 | by malloc does not meet REQUIRED_ALIGN, we increase SIZE to | |
1365 | make sure we allocate enough space. */ | |
1366 | if (MALLOC_ABI_ALIGNMENT >= required_align) | |
1367 | ask = size; | |
1368 | else | |
1369 | { | |
1370 | ask = expand_binop (Pmode, add_optab, size, | |
2f1cd2eb RS |
1371 | gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1372 | Pmode), | |
c070a3b9 ILT |
1373 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1374 | must_align = true; | |
1375 | } | |
c3928dde | 1376 | |
7458026b ILT |
1377 | func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1378 | ||
1379 | space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | |
c3928dde | 1380 | 1, ask, Pmode); |
7458026b ILT |
1381 | |
1382 | if (available_label == NULL_RTX) | |
1383 | return space; | |
1384 | ||
1385 | final_target = gen_reg_rtx (Pmode); | |
7458026b ILT |
1386 | |
1387 | emit_move_insn (final_target, space); | |
1388 | ||
1389 | final_label = gen_label_rtx (); | |
1390 | emit_jump (final_label); | |
1391 | ||
1392 | emit_label (available_label); | |
1393 | } | |
1394 | ||
18ca7dab RK |
1395 | do_pending_stack_adjust (); |
1396 | ||
1503a7ec | 1397 | /* We ought to be called always on the toplevel and stack ought to be aligned |
a1f300c0 | 1398 | properly. */ |
5b0264cb NS |
1399 | gcc_assert (!(stack_pointer_delta |
1400 | % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | |
1503a7ec | 1401 | |
d809253a EB |
1402 | /* If needed, check that we have the required amount of stack. Take into |
1403 | account what has already been checked. */ | |
1404 | if (STACK_CHECK_MOVING_SP) | |
1405 | ; | |
1406 | else if (flag_stack_check == GENERIC_STACK_CHECK) | |
b38f3813 EB |
1407 | probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, |
1408 | size); | |
1409 | else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) | |
1410 | probe_stack_range (STACK_CHECK_PROTECT, size); | |
edff2491 | 1411 | |
efec771a RH |
1412 | /* Don't let anti_adjust_stack emit notes. */ |
1413 | suppress_reg_args_size = true; | |
1414 | ||
18ca7dab RK |
1415 | /* Perform the required allocation from the stack. Some systems do | |
1416 | this in ways other than simply incrementing or decrementing the | |
38a448ca | 1417 | stack pointer, such as acquiring the space by calling malloc(). */ | |
18ca7dab RK |
1418 | #ifdef HAVE_allocate_stack |
1419 | if (HAVE_allocate_stack) | |
1420 | { | |
a5c7d693 | 1421 | struct expand_operand ops[2]; |
4b6c1672 RK |
1422 | /* We don't have to check against the predicate for operand 0 since |
1423 | TARGET is known to be a pseudo of the proper mode, which must | |
a5c7d693 RS |
1424 | be valid for the operand. */ |
1425 | create_fixed_operand (&ops[0], target); | |
1426 | create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true); | |
1427 | expand_insn (CODE_FOR_allocate_stack, 2, ops); | |
18ca7dab RK |
1428 | } |
1429 | else | |
1430 | #endif | |
ea534b63 | 1431 | { |
32990d5b JJ |
1432 | int saved_stack_pointer_delta; |
1433 | ||
38a448ca RH |
1434 | #ifndef STACK_GROWS_DOWNWARD |
1435 | emit_move_insn (target, virtual_stack_dynamic_rtx); | |
1436 | #endif | |
a157febd GK |
1437 | |
1438 | /* Check stack bounds if necessary. */ | |
e3b5732b | 1439 | if (crtl->limit_stack) |
a157febd GK |
1440 | { |
1441 | rtx available; | |
1442 | rtx space_available = gen_label_rtx (); | |
1443 | #ifdef STACK_GROWS_DOWNWARD | |
d9b3eb63 | 1444 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1445 | stack_pointer_rtx, stack_limit_rtx, |
1446 | NULL_RTX, 1, OPTAB_WIDEN); | |
1447 | #else | |
d9b3eb63 | 1448 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1449 | stack_limit_rtx, stack_pointer_rtx, |
1450 | NULL_RTX, 1, OPTAB_WIDEN); | |
1451 | #endif | |
1452 | emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1, | |
a06ef755 | 1453 | space_available); |
a157febd GK |
1454 | #ifdef HAVE_trap |
1455 | if (HAVE_trap) | |
1456 | emit_insn (gen_trap ()); | |
1457 | else | |
1458 | #endif | |
1459 | error ("stack limits not supported on this target"); | |
1460 | emit_barrier (); | |
1461 | emit_label (space_available); | |
1462 | } | |
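	  /* Illustrative model (not GCC code) of the check just emitted,
	     for a downward-growing stack:

	         available = stack_pointer - stack_limit;   // unsigned sub
	         if (!(available >= size))                  // GEU above
	           trap ();                                 // or error ()
	  */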
1463 | ||
32990d5b | 1464 | saved_stack_pointer_delta = stack_pointer_delta; |
9a08d230 | 1465 | |
d809253a | 1466 | if (flag_stack_check && STACK_CHECK_MOVING_SP) |
c35af30f | 1467 | anti_adjust_stack_and_probe (size, false); |
d809253a EB |
1468 | else |
1469 | anti_adjust_stack (size); | |
9a08d230 | 1470 | |
32990d5b JJ |
1471 | /* Even if size is constant, don't modify stack_pointer_delta. |
1472 | The constant size alloca should preserve | |
1473 | crtl->preferred_stack_boundary alignment. */ | |
1474 | stack_pointer_delta = saved_stack_pointer_delta; | |
d5457140 | 1475 | |
18ca7dab | 1476 | #ifdef STACK_GROWS_DOWNWARD |
ca56cd30 | 1477 | emit_move_insn (target, virtual_stack_dynamic_rtx); |
18ca7dab | 1478 | #endif |
38a448ca | 1479 | } |
18ca7dab | 1480 | |
efec771a RH |
1481 | suppress_reg_args_size = false; |
1482 | ||
3a42502d RH |
1483 | /* Finish up the split stack handling. */ |
1484 | if (final_label != NULL_RTX) | |
1485 | { | |
1486 | gcc_assert (flag_split_stack); | |
1487 | emit_move_insn (final_target, target); | |
1488 | emit_label (final_label); | |
1489 | target = final_target; | |
1490 | } | |
1491 | ||
1492 | if (must_align) | |
091ad0b9 | 1493 | { |
5244db05 | 1494 | /* CEIL_DIV_EXPR needs to worry about the addition overflowing, |
0f41302f MS |
1495 | but we know it can't. So add ourselves and then do |
1496 | TRUNC_DIV_EXPR. */ | |
0f56a403 | 1497 | target = expand_binop (Pmode, add_optab, target, |
2f1cd2eb RS |
1498 | gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1499 | Pmode), | |
5244db05 RK |
1500 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1501 | target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target, | |
2f1cd2eb RS |
1502 | gen_int_mode (required_align / BITS_PER_UNIT, |
1503 | Pmode), | |
b1ec3c92 | 1504 | NULL_RTX, 1); |
091ad0b9 | 1505 | target = expand_mult (Pmode, target, |
2f1cd2eb RS |
1506 | gen_int_mode (required_align / BITS_PER_UNIT, |
1507 | Pmode), | |
b1ec3c92 | 1508 | NULL_RTX, 1); |
091ad0b9 | 1509 | } |
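  /* Illustrative example: the three insns above compute
     ((target + a - 1) / a) * a with a = required_align / BITS_PER_UNIT,
     i.e. TARGET rounded up to the next multiple of A.  E.g. for target
     0x1003 and a 16: 0x1003 + 15 = 0x1012; 0x1012 / 16 = 0x101;
     0x101 * 16 = 0x1010.  */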
d9b3eb63 | 1510 | |
3a42502d RH |
1511 | /* Now that we've committed to a return value, mark its alignment. */ |
1512 | mark_reg_pointer (target, required_align); | |
1513 | ||
15fc0026 | 1514 | /* Record the new stack level for nonlocal gotos. */ |
6de9cd9a DN |
1515 | if (cfun->nonlocal_goto_save_area != 0) |
1516 | update_nonlocal_goto_save_area (); | |
15fc0026 | 1517 | |
18ca7dab RK |
1518 | return target; |
1519 | } | |
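/* A minimal standalone sketch (illustrative only, not GCC code) of the
   two steps used above for the library fallback: over-allocate by
   ALIGN - 1 bytes, then round the result up to a multiple of ALIGN.
   malloc stands in for the dynamic allocation; the function name is
   hypothetical.  */

#include <stdint.h>
#include <stdlib.h>

static void *
aligned_dynamic_alloc_sketch (size_t size, size_t align)
{
  uintptr_t p = (uintptr_t) malloc (size + align - 1);
  if (p == 0)
    return NULL;
  /* Same arithmetic as the add/TRUNC_DIV/MULT sequence above.  */
  return (void *) ((p + align - 1) / align * align);
}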
1520 | \f | |
d9b3eb63 | 1521 | /* A front end may want to override GCC's stack checking by providing a |
14a774a9 RK |
1522 | run-time routine to call to check the stack, so provide a mechanism for |
1523 | calling that routine. */ | |
1524 | ||
e2500fed | 1525 | static GTY(()) rtx stack_check_libfunc; |
14a774a9 RK |
1526 | |
1527 | void | |
d477d1fe | 1528 | set_stack_check_libfunc (const char *libfunc_name) |
14a774a9 | 1529 | { |
d477d1fe SB |
1530 | gcc_assert (stack_check_libfunc == NULL_RTX); |
1531 | stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name); | |
14a774a9 RK |
1532 | } |
1533 | \f | |
edff2491 RK |
1534 | /* Emit one stack probe at ADDRESS, an address within the stack. */ |
1535 | ||
260c8ba3 | 1536 | void |
502b8322 | 1537 | emit_stack_probe (rtx address) |
edff2491 | 1538 | { |
7b84aac0 EB |
1539 | #ifdef HAVE_probe_stack_address |
1540 | if (HAVE_probe_stack_address) | |
1541 | emit_insn (gen_probe_stack_address (address)); | |
1542 | else | |
1543 | #endif | |
1544 | { | |
1545 | rtx memref = gen_rtx_MEM (word_mode, address); | |
edff2491 | 1546 | |
7b84aac0 | 1547 | MEM_VOLATILE_P (memref) = 1; |
edff2491 | 1548 | |
7b84aac0 | 1549 | /* See if we have an insn to probe the stack. */ |
d809253a | 1550 | #ifdef HAVE_probe_stack |
7b84aac0 EB |
1551 | if (HAVE_probe_stack) |
1552 | emit_insn (gen_probe_stack (memref)); | |
1553 | else | |
d809253a | 1554 | #endif |
7b84aac0 EB |
1555 | emit_move_insn (memref, const0_rtx); |
1556 | } | |
edff2491 RK |
1557 | } |
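/* Conceptual sketch (illustrative, not GCC code): a stack probe is just
   a forced access to an address in the not-yet-validated part of the
   stack, so that a guard page is hit before the stack pointer moves
   past it.  The volatile access mirrors MEM_VOLATILE_P above.  */

static void
stack_probe_sketch (char *address)
{
  *(volatile char *) address = 0;
}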
1558 | ||
d9b3eb63 | 1559 | /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive. |
d809253a EB |
1560 | FIRST is a constant and size is a Pmode RTX. These are offsets from |
1561 | the current stack pointer. STACK_GROWS_DOWNWARD says whether to add | |
1562 | or subtract them from the stack pointer. */ | |
1563 | ||
1564 | #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP) | |
edff2491 RK |
1565 | |
1566 | #ifdef STACK_GROWS_DOWNWARD | |
1567 | #define STACK_GROW_OP MINUS | |
d809253a EB |
1568 | #define STACK_GROW_OPTAB sub_optab |
1569 | #define STACK_GROW_OFF(off) -(off) | |
edff2491 RK |
1570 | #else |
1571 | #define STACK_GROW_OP PLUS | |
d809253a EB |
1572 | #define STACK_GROW_OPTAB add_optab |
1573 | #define STACK_GROW_OFF(off) (off) | |
edff2491 RK |
1574 | #endif |
1575 | ||
1576 | void | |
502b8322 | 1577 | probe_stack_range (HOST_WIDE_INT first, rtx size) |
edff2491 | 1578 | { |
4b6c1672 RK |
1579 | /* First ensure SIZE is Pmode. */ |
1580 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1581 | size = convert_to_mode (Pmode, size, 1); | |
1582 | ||
d809253a EB |
1583 | /* Next see if we have a function to check the stack. */ |
1584 | if (stack_check_libfunc) | |
f5f5363f | 1585 | { |
d809253a | 1586 | rtx addr = memory_address (Pmode, |
2b3aadfc RH |
1587 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1588 | stack_pointer_rtx, | |
0a81f074 RS |
1589 | plus_constant (Pmode, |
1590 | size, first))); | |
949fa04c EB |
1591 | emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr, |
1592 | Pmode); | |
f5f5363f | 1593 | } |
14a774a9 | 1594 | |
d809253a | 1595 | /* Next see if we have an insn to check the stack. */ |
edff2491 | 1596 | #ifdef HAVE_check_stack |
d6a6a07a | 1597 | else if (HAVE_check_stack) |
edff2491 | 1598 | { |
a5c7d693 | 1599 | struct expand_operand ops[1]; |
d809253a EB |
1600 | rtx addr = memory_address (Pmode, |
1601 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1602 | stack_pointer_rtx, | |
0a81f074 RS |
1603 | plus_constant (Pmode, |
1604 | size, first))); | |
d6a6a07a | 1605 | bool success; |
a5c7d693 | 1606 | create_input_operand (&ops[0], addr, Pmode); |
d6a6a07a EB |
1607 | success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops); |
1608 | gcc_assert (success); | |
edff2491 RK |
1609 | } |
1610 | #endif | |
1611 | ||
d809253a EB |
1612 | /* Otherwise we have to generate explicit probes. If we have a constant |
1613 | small number of them to generate, that's the easy case. */ | |
1614 | else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) | |
edff2491 | 1615 | { |
d809253a EB |
1616 | HOST_WIDE_INT isize = INTVAL (size), i; |
1617 | rtx addr; | |
1618 | ||
1619 | /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until | |
1620 | it exceeds SIZE. If only one probe is needed, this will not | |
1621 | generate any code. Then probe at FIRST + SIZE. */ | |
1622 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1623 | { | |
1624 | addr = memory_address (Pmode, | |
0a81f074 | 1625 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1626 | STACK_GROW_OFF (first + i))); |
1627 | emit_stack_probe (addr); | |
1628 | } | |
1629 | ||
1630 | addr = memory_address (Pmode, | |
0a81f074 | 1631 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1632 | STACK_GROW_OFF (first + isize))); |
1633 | emit_stack_probe (addr); | |
edff2491 RK |
1634 | } |
1635 | ||
d809253a EB |
1636 | /* In the variable case, do the same as above, but in a loop. Note that we |
1637 | must be extra careful with variables wrapping around because we might be | |
1638 | at the very top (or the very bottom) of the address space and we have to | |
1639 | be able to handle this case properly; in particular, we use an equality | |
1640 | test for the loop condition. */ | |
edff2491 RK |
1641 | else |
1642 | { | |
d809253a | 1643 | rtx rounded_size, rounded_size_op, test_addr, last_addr, temp; |
edff2491 | 1644 | rtx loop_lab = gen_label_rtx (); |
edff2491 | 1645 | rtx end_lab = gen_label_rtx (); |
edff2491 | 1646 | |
edff2491 | 1647 | |
d809253a EB |
1648 | /* Step 1: round SIZE to the previous multiple of the interval. */ |
1649 | ||
1650 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1651 | rounded_size | |
69a59f0f RS |
1652 | = simplify_gen_binary (AND, Pmode, size, |
1653 | gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
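      /* Illustrative example: with PROBE_INTERVAL == 4096, a SIZE of
         10000 gives 10000 & -4096 == 8192, i.e. SIZE rounded down to
         the previous multiple of the interval.  */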
d809253a EB |
1654 | rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1655 | ||
1656 | ||
1657 | /* Step 2: compute initial and final value of the loop counter. */ | |
1658 | ||
1659 | /* TEST_ADDR = SP + FIRST. */ | |
1660 | test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1661 | stack_pointer_rtx, | |
4789c0ce RS |
1662 | gen_int_mode (first, Pmode)), |
1663 | NULL_RTX); | |
d809253a EB |
1664 | |
1665 | /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */ | |
1666 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1667 | test_addr, | |
1668 | rounded_size_op), NULL_RTX); | |
1669 | ||
1670 | ||
1671 | /* Step 3: the loop | |
1672 | ||
1673 | while (TEST_ADDR != LAST_ADDR) | |
1674 | { | |
1675 | TEST_ADDR = TEST_ADDR + PROBE_INTERVAL | |
1676 | probe at TEST_ADDR | |
1677 | } | |
1678 | ||
1679 | probes at FIRST + N * PROBE_INTERVAL for values of N from 1 | |
1680 | until it is equal to ROUNDED_SIZE. */ | |
edff2491 RK |
1681 | |
1682 | emit_label (loop_lab); | |
edff2491 | 1683 | |
d809253a EB |
1684 | /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */ |
1685 | emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1, | |
1686 | end_lab); | |
1687 | ||
1688 | /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */ | |
1689 | temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr, | |
2f1cd2eb | 1690 | gen_int_mode (PROBE_INTERVAL, Pmode), test_addr, |
edff2491 | 1691 | 1, OPTAB_WIDEN); |
edff2491 | 1692 | |
5b0264cb | 1693 | gcc_assert (temp == test_addr); |
edff2491 | 1694 | |
d809253a EB |
1695 | /* Probe at TEST_ADDR. */ |
1696 | emit_stack_probe (test_addr); | |
1697 | ||
1698 | emit_jump (loop_lab); | |
1699 | ||
edff2491 RK |
1700 | emit_label (end_lab); |
1701 | ||
d809253a EB |
1702 | |
1703 | /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time | |
1704 | that SIZE is equal to ROUNDED_SIZE. */ | |
1705 | ||
1706 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1707 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1708 | if (temp != const0_rtx) | |
1709 | { | |
1710 | rtx addr; | |
1711 | ||
32990d5b | 1712 | if (CONST_INT_P (temp)) |
d809253a EB |
1713 | { |
1714 | /* Use [base + disp] addressing mode if supported. */ | |
1715 | HOST_WIDE_INT offset = INTVAL (temp); | |
1716 | addr = memory_address (Pmode, | |
0a81f074 | 1717 | plus_constant (Pmode, last_addr, |
d809253a EB |
1718 | STACK_GROW_OFF (offset))); |
1719 | } | |
1720 | else | |
1721 | { | |
1722 | /* Manual CSE if the difference is not known at compile-time. */ | |
1723 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1724 | addr = memory_address (Pmode, | |
1725 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1726 | last_addr, temp)); | |
1727 | } | |
1728 | ||
1729 | emit_stack_probe (addr); | |
1730 | } | |
edff2491 | 1731 | } |
eabcc725 EB |
1732 | |
1733 | /* Make sure nothing is scheduled before we are done. */ | |
1734 | emit_insn (gen_blockage ()); | |
edff2491 | 1735 | } |
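/* Standalone model (illustrative, not GCC code) of the probing schedule
   implemented above, written for a downward-growing stack: touch the
   stack every INTERVAL bytes from SP - FIRST down to SP - (FIRST + SIZE),
   ending with a probe exactly at the far end.  INTERVAL must be a power
   of two, as PROBE_INTERVAL is.  */

#include <stddef.h>

static void
probe_range_sketch (char *sp, size_t first, size_t size, size_t interval)
{
  size_t rounded = size & ~(interval - 1);	/* round down */
  size_t i;

  for (i = interval; i <= rounded; i += interval)
    *(volatile char *) (sp - first - i) = 0;

  if (size != rounded)
    *(volatile char *) (sp - first - size) = 0;
}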
d809253a | 1736 | |
c35af30f EB |
1737 | /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) |
1738 | while probing it. This pushes when SIZE is positive. SIZE need not | |
1739 | be constant. If ADJUST_BACK is true, adjust the stack pointer back | |
1740 | by plus SIZE at the end. */ | |
d809253a | 1741 | |
c35af30f EB |
1742 | void |
1743 | anti_adjust_stack_and_probe (rtx size, bool adjust_back) | |
d809253a | 1744 | { |
c35af30f EB |
1745 | /* We skip the probe for the first interval + a small dope of 4 words and |
1746 | probe that many bytes past the specified size to maintain a protection | |
1747 | area at the bottom of the stack. */ | |
d809253a EB |
1748 | const int dope = 4 * UNITS_PER_WORD; |
1749 | ||
1750 | /* First ensure SIZE is Pmode. */ | |
1751 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1752 | size = convert_to_mode (Pmode, size, 1); | |
1753 | ||
1754 | /* If we have a constant small number of probes to generate, that's the | |
1755 | easy case. */ | |
32990d5b | 1756 | if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) |
d809253a EB |
1757 | { |
1758 | HOST_WIDE_INT isize = INTVAL (size), i; | |
1759 | bool first_probe = true; | |
1760 | ||
260c8ba3 | 1761 | /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1762 | values of N from 1 until it exceeds SIZE. If only one probe is |
1763 | needed, this will not generate any code. Then adjust and probe | |
1764 | to PROBE_INTERVAL + SIZE. */ | |
1765 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1766 | { | |
1767 | if (first_probe) | |
1768 | { | |
1769 | anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope)); | |
1770 | first_probe = false; | |
1771 | } | |
1772 | else | |
1773 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1774 | emit_stack_probe (stack_pointer_rtx); | |
1775 | } | |
1776 | ||
1777 | if (first_probe) | |
0a81f074 | 1778 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
d809253a | 1779 | else |
0a81f074 | 1780 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i)); |
d809253a EB |
1781 | emit_stack_probe (stack_pointer_rtx); |
1782 | } | |
1783 | ||
1784 | /* In the variable case, do the same as above, but in a loop. Note that we | |
1785 | must be extra careful with variables wrapping around because we might be | |
1786 | at the very top (or the very bottom) of the address space and we have to | |
1787 | be able to handle this case properly; in particular, we use an equality | |
1788 | test for the loop condition. */ | |
1789 | else | |
1790 | { | |
1791 | rtx rounded_size, rounded_size_op, last_addr, temp; | |
1792 | rtx loop_lab = gen_label_rtx (); | |
1793 | rtx end_lab = gen_label_rtx (); | |
1794 | ||
1795 | ||
1796 | /* Step 1: round SIZE to the previous multiple of the interval. */ | |
1797 | ||
1798 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1799 | rounded_size | |
69a59f0f RS |
1800 | = simplify_gen_binary (AND, Pmode, size, |
1801 | gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
d809253a EB |
1802 | rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1803 | ||
1804 | ||
1805 | /* Step 2: compute initial and final value of the loop counter. */ | |
1806 | ||
1807 | /* SP = SP_0 + PROBE_INTERVAL + dope. */ | |
1808 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
1809 | ||
1810 | /* LAST_ADDR = SP_0 + PROBE_INTERVAL + dope + ROUNDED_SIZE. */ | |
1811 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1812 | stack_pointer_rtx, | |
1813 | rounded_size_op), NULL_RTX); | |
1814 | ||
1815 | ||
1816 | /* Step 3: the loop | |
1817 | ||
260c8ba3 EB |
1818 | while (SP != LAST_ADDR) |
1819 | { | |
1820 | SP = SP + PROBE_INTERVAL | |
1821 | probe at SP | |
1822 | } | |
d809253a | 1823 | |
260c8ba3 | 1824 | adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1825 | values of N from 1 until it is equal to ROUNDED_SIZE. */ |
1826 | ||
1827 | emit_label (loop_lab); | |
1828 | ||
1829 | /* Jump to END_LAB if SP == LAST_ADDR. */ | |
1830 | emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX, | |
1831 | Pmode, 1, end_lab); | |
1832 | ||
1833 | /* SP = SP + PROBE_INTERVAL and probe at SP. */ | |
1834 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1835 | emit_stack_probe (stack_pointer_rtx); | |
1836 | ||
1837 | emit_jump (loop_lab); | |
1838 | ||
1839 | emit_label (end_lab); | |
1840 | ||
1841 | ||
260c8ba3 | 1842 | /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot |
d809253a EB |
1843 | assert at compile-time that SIZE is equal to ROUNDED_SIZE. */ |
1844 | ||
1845 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1846 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1847 | if (temp != const0_rtx) | |
1848 | { | |
1849 | /* Manual CSE if the difference is not known at compile-time. */ | |
1850 | if (GET_CODE (temp) != CONST_INT) | |
1851 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1852 | anti_adjust_stack (temp); | |
1853 | emit_stack_probe (stack_pointer_rtx); | |
1854 | } | |
1855 | } | |
1856 | ||
c35af30f EB |
1857 | /* Adjust back and account for the additional first interval. */ |
1858 | if (adjust_back) | |
0a81f074 | 1859 | adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
c35af30f EB |
1860 | else |
1861 | adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
d809253a EB |
1862 | } |
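/* Standalone model (illustrative, not GCC code) of the "moving SP"
   strategy above for a downward-growing stack: the stack pointer itself
   is moved in PROBE_INTERVAL steps and probed after each step, so it
   never jumps past an unprobed page.  The first interval plus the
   4-word dope is skipped up front and paid back at the end.  */

#include <stddef.h>

static char *
moving_sp_probe_sketch (char *sp, size_t size, size_t interval,
                        size_t dope, int adjust_back)
{
  size_t rounded = size & ~(interval - 1);
  size_t moved;

  sp -= interval + dope;		/* skip the protection area */
  for (moved = 0; moved < rounded; moved += interval)
    {
      sp -= interval;
      *(volatile char *) sp = 0;
    }
  if (size != rounded)
    {
      sp -= size - rounded;
      *(volatile char *) sp = 0;
    }
  if (adjust_back)
    sp += size + interval + dope;	/* undo the whole adjustment */
  else
    sp += interval + dope;		/* leave SIZE bytes allocated */
  return sp;
}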
1863 | ||
18ca7dab RK |
1864 | /* Return an rtx representing the register or memory location |
1865 | in which a scalar value of data type VALTYPE | |
1866 | was returned by a function call to function FUNC. | |
1d636cc6 RG |
1867 | FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise |
1868 | function is known, otherwise 0. | |
4dc07bd7 JJ |
1869 | OUTGOING is 1 if, on a machine with register windows, this function | |
1870 | should return the register in which the function will put its result, | |
30f7a378 | 1871 | and 0 otherwise. */ | |
18ca7dab RK |
1872 | |
1873 | rtx | |
586de218 | 1874 | hard_function_value (const_tree valtype, const_tree func, const_tree fntype, |
502b8322 | 1875 | int outgoing ATTRIBUTE_UNUSED) |
18ca7dab | 1876 | { |
4dc07bd7 | 1877 | rtx val; |
770ae6cc | 1878 | |
1d636cc6 | 1879 | val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing); |
770ae6cc | 1880 | |
f8cfc6aa | 1881 | if (REG_P (val) |
e1a4071f JL |
1882 | && GET_MODE (val) == BLKmode) |
1883 | { | |
770ae6cc | 1884 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype); |
e1a4071f | 1885 | enum machine_mode tmpmode; |
770ae6cc | 1886 | |
d9b3eb63 | 1887 | /* int_size_in_bytes can return -1. We don't need a check here | |
535a42b1 NS |
1888 | since BYTES is unsigned, so -1 wraps to a value large enough that | |
1889 | no mode will match anyway. */ | |
d9b3eb63 | 1890 | |
e1a4071f | 1891 | for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
0fb7aeda KH |
1892 | tmpmode != VOIDmode; |
1893 | tmpmode = GET_MODE_WIDER_MODE (tmpmode)) | |
1894 | { | |
1895 | /* Have we found a large enough mode? */ | |
1896 | if (GET_MODE_SIZE (tmpmode) >= bytes) | |
1897 | break; | |
1898 | } | |
e1a4071f JL |
1899 | |
1900 | /* No suitable mode found. */ | |
5b0264cb | 1901 | gcc_assert (tmpmode != VOIDmode); |
e1a4071f JL |
1902 | |
1903 | PUT_MODE (val, tmpmode); | |
d9b3eb63 | 1904 | } |
e1a4071f | 1905 | return val; |
18ca7dab RK |
1906 | } |
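/* Illustrative sketch (not GCC code) of the mode search above: walk
   integer mode sizes from narrowest to widest and take the first one
   that covers BYTES.  The size table is a hypothetical stand-in for
   GET_CLASS_NARROWEST_MODE / GET_MODE_WIDER_MODE.  */

#include <assert.h>
#include <stddef.h>

static size_t
widen_to_mode_size_sketch (size_t bytes)
{
  static const size_t mode_size[] = { 1, 2, 4, 8, 16 };
  size_t i;

  for (i = 0; i < sizeof mode_size / sizeof mode_size[0]; i++)
    if (mode_size[i] >= bytes)
      return mode_size[i];	/* first mode large enough */

  assert (0);			/* mirrors gcc_assert (tmpmode != VOIDmode) */
  return 0;
}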
1907 | ||
1908 | /* Return an rtx representing the register or memory location | |
1909 | in which a scalar value of mode MODE was returned by a library call. */ | |
1910 | ||
1911 | rtx | |
390b17c2 | 1912 | hard_libcall_value (enum machine_mode mode, rtx fun) |
18ca7dab | 1913 | { |
390b17c2 | 1914 | return targetm.calls.libcall_value (mode, fun); |
18ca7dab | 1915 | } |
0c5e217d RS |
1916 | |
1917 | /* Look up the tree code for a given rtx code | |
1918 | to provide the arithmetic operation for REAL_ARITHMETIC. | |
1919 | The function returns an int because the caller may not know | |
1920 | what `enum tree_code' means. */ | |
1921 | ||
1922 | int | |
502b8322 | 1923 | rtx_to_tree_code (enum rtx_code code) |
0c5e217d RS |
1924 | { |
1925 | enum tree_code tcode; | |
1926 | ||
1927 | switch (code) | |
1928 | { | |
1929 | case PLUS: | |
1930 | tcode = PLUS_EXPR; | |
1931 | break; | |
1932 | case MINUS: | |
1933 | tcode = MINUS_EXPR; | |
1934 | break; | |
1935 | case MULT: | |
1936 | tcode = MULT_EXPR; | |
1937 | break; | |
1938 | case DIV: | |
1939 | tcode = RDIV_EXPR; | |
1940 | break; | |
1941 | case SMIN: | |
1942 | tcode = MIN_EXPR; | |
1943 | break; | |
1944 | case SMAX: | |
1945 | tcode = MAX_EXPR; | |
1946 | break; | |
1947 | default: | |
1948 | tcode = LAST_AND_UNUSED_TREE_CODE; | |
1949 | break; | |
1950 | } | |
1951 | return ((int) tcode); | |
1952 | } | |
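/* Illustrative usage note: rtx_to_tree_code (MULT) returns
   (int) MULT_EXPR, and any rtx code without a counterpart maps to
   LAST_AND_UNUSED_TREE_CODE.  */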
e2500fed GK |
1953 | |
1954 | #include "gt-explow.h" |