/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

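  /* The masking below is standard two's-complement sign extension:
     e.g. for width == 8 and c == 0xff, the mask keeps the low 8 bits,
     the XOR with SIGN (0x80) gives 0x7f, and subtracting SIGN yields
     -1, the correctly extended value.  */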
  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
80 | ||
929e10f4 | 81 | /* Return an rtx for the sum of X and the integer C, given that X has |
23b33725 RS |
82 | mode MODE. INPLACE is true if X can be modified inplace or false |
83 | if it must be treated as immutable. */ | |
18ca7dab RK |
84 | |
85 | rtx | |
ef4bddc2 | 86 | plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c, |
23b33725 | 87 | bool inplace) |
18ca7dab | 88 | { |
b3694847 | 89 | RTX_CODE code; |
17ab7c59 | 90 | rtx y; |
b3694847 | 91 | rtx tem; |
18ca7dab RK |
92 | int all_constant = 0; |
93 | ||
0a81f074 RS |
94 | gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode); |
95 | ||
18ca7dab RK |
96 | if (c == 0) |
97 | return x; | |
98 | ||
99 | restart: | |
100 | ||
101 | code = GET_CODE (x); | |
17ab7c59 RK |
102 | y = x; |
103 | ||
18ca7dab RK |
104 | switch (code) |
105 | { | |
807e902e KZ |
106 | CASE_CONST_SCALAR_INT: |
107 | return immed_wide_int_const (wi::add (std::make_pair (x, mode), c), | |
108 | mode); | |
18ca7dab RK |
109 | case MEM: |
110 | /* If this is a reference to the constant pool, try replacing it with | |
111 | a reference to a new constant. If the resulting address isn't | |
112 | valid, don't return it because we have no way to validize it. */ | |
113 | if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF | |
114 | && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0))) | |
115 | { | |
0a81f074 | 116 | tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c); |
929e10f4 | 117 | tem = force_const_mem (GET_MODE (x), tem); |
18ca7dab RK |
118 | if (memory_address_p (GET_MODE (tem), XEXP (tem, 0))) |
119 | return tem; | |
120 | } | |
121 | break; | |
122 | ||
123 | case CONST: | |
124 | /* If adding to something entirely constant, set a flag | |
125 | so that we can add a CONST around the result. */ | |
23b33725 RS |
126 | if (inplace && shared_const_p (x)) |
127 | inplace = false; | |
18ca7dab RK |
128 | x = XEXP (x, 0); |
129 | all_constant = 1; | |
130 | goto restart; | |
131 | ||
132 | case SYMBOL_REF: | |
133 | case LABEL_REF: | |
134 | all_constant = 1; | |
135 | break; | |
136 | ||
137 | case PLUS: | |
929e10f4 MS |
138 | /* The interesting case is adding the integer to a sum. Look |
139 | for constant term in the sum and combine with C. For an | |
140 | integer constant term or a constant term that is not an | |
141 | explicit integer, we combine or group them together anyway. | |
03d937fc R |
142 | |
143 | We may not immediately return from the recursive call here, lest | |
144 | all_constant gets lost. */ | |
e5671f2b | 145 | |
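      /* For instance, adding 3 to (plus (reg) (const_int 4)) yields
         (plus (reg) (const_int 7)) rather than a nested sum.  */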
      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
          if (term == const0_rtx)
            x = XEXP (x, 0);
          else if (inplace)
            XEXP (x, 1) = term;
          else
            x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
          c = 0;
        }
      else if (rtx *const_loc = find_constant_term_loc (&y))
        {
          if (!inplace)
            {
              /* We need to be careful since X may be shared and we can't
                 modify it in place.  */
              x = copy_rtx (x);
              const_loc = find_constant_term_loc (&x);
            }
          *const_loc = plus_constant (mode, *const_loc, c, true);
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

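/* For example, given (plus (plus (reg) (const_int 8)) (const_int -3))
   and *CONSTPTR == const0_rtx, the result is (reg) and *CONSTPTR ends
   up as (const_int 5).  */
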
rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

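/* For example, on a target where a SYMBOL_REF is a constant address,
   (plus (mem A) (symbol_ref B)) becomes (plus (reg) (reg)), with the
   memory load and the symbol load emitted as separate insns.  */
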
static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  */

static rtx
convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
                                     bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space_1
                              (to_mode, XEXP (x, 0), as, true));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
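      /* E.g. when narrowing, (plus:DI (reg) (const_int 4)) can become
         (plus:SI (reg') (const_int 4)): the conversion is applied to
         the register operand and the constant is carried over as-is.  */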
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space_1
                                 (to_mode, XEXP (x, 0), as, in_const),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false);
}
\f
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
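      /* For example, given (plus (mult R 4) (const_int 20)) where the
         whole sum is not a valid address, we may compute R*4 into a
         register T and use (plus T (const_int 20)); other references
         based on the same product can then share T.  */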
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
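  /* E.g. (const (plus (symbol_ref S) (const_int 12))) splits into
     base S and offset 12; a bare SYMBOL_REF keeps offset 0.  */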
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, it must be an address with PLUS and MULT, so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, it must be an address with PLUS and MULT, so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

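        /* A nonzero offset can only weaken the alignment implied by
           the symbol, so ALIGN is capped by a conservative bound
           derived from the offset's trailing zero bits.  */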
        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

\f
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
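  /* E.g. with align == 16, a size of 40 becomes
     ((40 + 15) / 16) * 16, i.e. 48.  */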
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx_insn *insn;
          rtx set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

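      /* LSB now holds the lowest set bit of the size: e.g. for a
         size of 48 (0x30) it is 0x10, so a constant 48-byte
         allocation is known to be 16-byte aligned.  */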
1239 | /* Watch out for overflow truncating to "unsigned". */ | |
1240 | if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1241 | size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1242 | else | |
1243 | size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1244 | } | |
1245 | else if (size_align < BITS_PER_UNIT) | |
1246 | size_align = BITS_PER_UNIT; | |
1247 | ||
34831f3e RH |
1248 | /* We can't attempt to minimize alignment necessary, because we don't |
1249 | know the final value of preferred_stack_boundary yet while executing | |
1250 | this code. */ | |
1251 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1252 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1253 | ||
18ca7dab | 1254 | /* We will need to ensure that the address we return is aligned to |
34831f3e RH |
1255 | REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't |
1256 | always know its final value at this point in the compilation (it | |
1257 | might depend on the size of the outgoing parameter lists, for | |
1258 | example), so we must align the value to be returned in that case. | |
1259 | (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if | |
1260 | STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined). | |
1261 | We must also do an alignment operation on the returned value if | |
1262 | the stack pointer alignment is less strict than REQUIRED_ALIGN. | |
1263 | ||
1264 | If we have to align, we must leave space in SIZE for the hole | |
1265 | that might result from the alignment operation. */ | |
1266 | ||
1267 | must_align = (crtl->preferred_stack_boundary < required_align); | |
1268 | if (must_align) | |
d3c12306 | 1269 | { |
34831f3e RH |
1270 | if (required_align > PREFERRED_STACK_BOUNDARY) |
1271 | extra_align = PREFERRED_STACK_BOUNDARY; | |
1272 | else if (required_align > STACK_BOUNDARY) | |
1273 | extra_align = STACK_BOUNDARY; | |
1274 | else | |
1275 | extra_align = BITS_PER_UNIT; | |
1ecad98e EB |
1276 | } |
1277 | ||
34831f3e RH |
1278 | /* ??? STACK_POINTER_OFFSET is always defined now. */ |
1279 | #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) | |
1280 | must_align = true; | |
1281 | extra_align = BITS_PER_UNIT; | |
1282 | #endif | |
1ecad98e | 1283 | |
34831f3e RH |
1284 | if (must_align) |
1285 | { | |
1286 | unsigned extra = (required_align - extra_align) / BITS_PER_UNIT; | |
3a42502d | 1287 | |
0a81f074 | 1288 | size = plus_constant (Pmode, size, extra); |
3a42502d | 1289 | size = force_operand (size, NULL_RTX); |
d3c12306 | 1290 | |
a11e0df4 | 1291 | if (flag_stack_usage_info) |
3a42502d | 1292 | stack_usage_size += extra; |
34831f3e | 1293 | |
3a42502d RH |
1294 | if (extra && size_align > extra_align) |
1295 | size_align = extra_align; | |
d3c12306 | 1296 | } |
1d9d04f8 | 1297 | |
18ca7dab | 1298 | /* Round the size to a multiple of the required stack alignment. |
34831f3e | 1299 | Since the stack if presumed to be rounded before this allocation, |
18ca7dab RK |
1300 | this will maintain the required alignment. |
1301 | ||
1302 | If the stack grows downward, we could save an insn by subtracting | |
1303 | SIZE from the stack pointer and then aligning the stack pointer. | |
1304 | The problem with this is that the stack pointer may be unaligned | |
1305 | between the execution of the subtraction and alignment insns and | |
1306 | some machines do not allow this. Even on those that do, some | |
1307 | signal handlers malfunction if a signal should occur between those | |
1308 | insns. Since this is an extremely rare event, we have no reliable | |
1309 | way of knowing which systems have this problem. So we avoid even | |
1310 | momentarily mis-aligning the stack. */ | |
3a42502d | 1311 | if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) |
d3c12306 EB |
1312 | { |
1313 | size = round_push (size); | |
18ca7dab | 1314 | |
a11e0df4 | 1315 | if (flag_stack_usage_info) |
d3c12306 | 1316 | { |
32990d5b | 1317 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
d3c12306 EB |
1318 | stack_usage_size = (stack_usage_size + align - 1) / align * align; |
1319 | } | |
1320 | } | |
1321 | ||
3a42502d | 1322 | target = gen_reg_rtx (Pmode); |
7458026b | 1323 | |
d3c12306 EB |
1324 | /* The size is supposed to be fully adjusted at this point so record it |
1325 | if stack usage info is requested. */ | |
a11e0df4 | 1326 | if (flag_stack_usage_info) |
d3c12306 EB |
1327 | { |
1328 | current_function_dynamic_stack_size += stack_usage_size; | |
1329 | ||
1330 | /* ??? This is gross but the only safe stance in the absence | |
1331 | of stack usage oriented flow analysis. */ | |
1332 | if (!cannot_accumulate) | |
1333 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1334 | } | |
18ca7dab | 1335 | |
528a80c1 | 1336 | final_label = NULL; |
7458026b ILT |
1337 | final_target = NULL_RTX; |
1338 | ||
1339 | /* If we are splitting the stack, we need to ask the backend whether | |
1340 | there is enough room on the current stack. If there isn't, or if | |
1341 | the backend doesn't know how to tell is, then we need to call a | |
1342 | function to allocate memory in some other way. This memory will | |
1343 | be released when we release the current stack segment. The | |
1344 | effect is that stack allocation becomes less efficient, but at | |
1345 | least it doesn't cause a stack overflow. */ | |
1346 | if (flag_split_stack) | |
1347 | { | |
528a80c1 DM |
1348 | rtx_code_label *available_label; |
1349 | rtx ask, space, func; | |
7458026b | 1350 | |
528a80c1 | 1351 | available_label = NULL; |
7458026b ILT |
1352 | |
1353 | #ifdef HAVE_split_stack_space_check | |
1354 | if (HAVE_split_stack_space_check) | |
1355 | { | |
1356 | available_label = gen_label_rtx (); | |
1357 | ||
1358 | /* This instruction will branch to AVAILABLE_LABEL if there | |
1359 | are SIZE bytes available on the stack. */ | |
1360 | emit_insn (gen_split_stack_space_check (size, available_label)); | |
1361 | } | |
1362 | #endif | |
1363 | ||
c3928dde | 1364 | /* The __morestack_allocate_stack_space function will allocate |
c070a3b9 ILT |
1365 | memory using malloc. If the alignment of the memory returned |
1366 | by malloc does not meet REQUIRED_ALIGN, we increase SIZE to | |
1367 | make sure we allocate enough space. */ | |
1368 | if (MALLOC_ABI_ALIGNMENT >= required_align) | |
1369 | ask = size; | |
1370 | else | |
1371 | { | |
1372 | ask = expand_binop (Pmode, add_optab, size, | |
2f1cd2eb RS |
1373 | gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1374 | Pmode), | |
c070a3b9 ILT |
1375 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1376 | must_align = true; | |
1377 | } | |
c3928dde | 1378 | |
7458026b ILT |
1379 | func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1380 | ||
1381 | space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | |
c3928dde | 1382 | 1, ask, Pmode); |
7458026b ILT |
1383 | |
1384 | if (available_label == NULL_RTX) | |
1385 | return space; | |
1386 | ||
1387 | final_target = gen_reg_rtx (Pmode); | |
7458026b ILT |
1388 | |
1389 | emit_move_insn (final_target, space); | |
1390 | ||
1391 | final_label = gen_label_rtx (); | |
1392 | emit_jump (final_label); | |
1393 | ||
1394 | emit_label (available_label); | |
1395 | } | |
1396 | ||
18ca7dab RK |
1397 | do_pending_stack_adjust (); |
1398 | ||
1503a7ec | 1399 | /* We ought always to be called at the top level, and the stack ought
a1f300c0 | 1400 | to be properly aligned. */
5b0264cb NS |
1401 | gcc_assert (!(stack_pointer_delta |
1402 | % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | |
1503a7ec | 1403 | |
d809253a EB |
1404 | /* If needed, check that we have the required amount of stack. Take into |
1405 | account what has already been checked. */ | |
1406 | if (STACK_CHECK_MOVING_SP) | |
1407 | ; | |
1408 | else if (flag_stack_check == GENERIC_STACK_CHECK) | |
b38f3813 EB |
1409 | probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, |
1410 | size); | |
1411 | else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) | |
1412 | probe_stack_range (STACK_CHECK_PROTECT, size); | |
edff2491 | 1413 | |
efec771a RH |
1414 | /* Don't let anti_adjust_stack emit notes. */ |
1415 | suppress_reg_args_size = true; | |
1416 | ||
18ca7dab RK |
1417 | /* Perform the required allocation from the stack. Some systems do |
1418 | this differently from simply incrementing/decrementing the
38a448ca | 1419 | stack pointer, such as acquiring the space by calling malloc(). */ |
18ca7dab RK |
1420 | #ifdef HAVE_allocate_stack |
1421 | if (HAVE_allocate_stack) | |
1422 | { | |
a5c7d693 | 1423 | struct expand_operand ops[2]; |
4b6c1672 RK |
1424 | /* We don't have to check against the predicate for operand 0 since |
1425 | TARGET is known to be a pseudo of the proper mode, which must | |
a5c7d693 RS |
1426 | be valid for the operand. */ |
1427 | create_fixed_operand (&ops[0], target); | |
1428 | create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true); | |
1429 | expand_insn (CODE_FOR_allocate_stack, 2, ops); | |
18ca7dab RK |
1430 | } |
1431 | else | |
1432 | #endif | |
ea534b63 | 1433 | { |
32990d5b JJ |
1434 | int saved_stack_pointer_delta; |
1435 | ||
38a448ca RH |
1436 | #ifndef STACK_GROWS_DOWNWARD |
1437 | emit_move_insn (target, virtual_stack_dynamic_rtx); | |
1438 | #endif | |
a157febd GK |
1439 | |
1440 | /* Check stack bounds if necessary. */ | |
e3b5732b | 1441 | if (crtl->limit_stack) |
a157febd GK |
1442 | { |
1443 | rtx available; | |
528a80c1 | 1444 | rtx_code_label *space_available = gen_label_rtx (); |
a157febd | 1445 | #ifdef STACK_GROWS_DOWNWARD |
d9b3eb63 | 1446 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1447 | stack_pointer_rtx, stack_limit_rtx, |
1448 | NULL_RTX, 1, OPTAB_WIDEN); | |
1449 | #else | |
d9b3eb63 | 1450 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1451 | stack_limit_rtx, stack_pointer_rtx, |
1452 | NULL_RTX, 1, OPTAB_WIDEN); | |
1453 | #endif | |
1454 | emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1, | |
a06ef755 | 1455 | space_available); |
a157febd GK |
1456 | #ifdef HAVE_trap |
1457 | if (HAVE_trap) | |
1458 | emit_insn (gen_trap ()); | |
1459 | else | |
1460 | #endif | |
1461 | error ("stack limits not supported on this target"); | |
1462 | emit_barrier (); | |
1463 | emit_label (space_available); | |
1464 | } | |
1465 | ||
32990d5b | 1466 | saved_stack_pointer_delta = stack_pointer_delta; |
9a08d230 | 1467 | |
d809253a | 1468 | if (flag_stack_check && STACK_CHECK_MOVING_SP) |
c35af30f | 1469 | anti_adjust_stack_and_probe (size, false); |
d809253a EB |
1470 | else |
1471 | anti_adjust_stack (size); | |
9a08d230 | 1472 | |
32990d5b JJ |
1473 | /* Even if size is constant, don't modify stack_pointer_delta. |
1474 | The constant-size alloca should preserve
1475 | crtl->preferred_stack_boundary alignment. */ | |
1476 | stack_pointer_delta = saved_stack_pointer_delta; | |
d5457140 | 1477 | |
18ca7dab | 1478 | #ifdef STACK_GROWS_DOWNWARD |
ca56cd30 | 1479 | emit_move_insn (target, virtual_stack_dynamic_rtx); |
18ca7dab | 1480 | #endif |
38a448ca | 1481 | } |
18ca7dab | 1482 | |
efec771a RH |
1483 | suppress_reg_args_size = false; |
1484 | ||
3a42502d RH |
1485 | /* Finish up the split stack handling. */ |
1486 | if (final_label != NULL_RTX) | |
1487 | { | |
1488 | gcc_assert (flag_split_stack); | |
1489 | emit_move_insn (final_target, target); | |
1490 | emit_label (final_label); | |
1491 | target = final_target; | |
1492 | } | |
1493 | ||
1494 | if (must_align) | |
091ad0b9 | 1495 | { |
5244db05 | 1496 | /* CEIL_DIV_EXPR needs to worry about the addition overflowing, |
0f41302f MS |
1497 | but we know it can't. So do the addition ourselves and then use
1498 | TRUNC_DIV_EXPR. */ | |
0f56a403 | 1499 | target = expand_binop (Pmode, add_optab, target, |
2f1cd2eb RS |
1500 | gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1501 | Pmode), | |
5244db05 RK |
1502 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1503 | target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target, | |
2f1cd2eb RS |
1504 | gen_int_mode (required_align / BITS_PER_UNIT, |
1505 | Pmode), | |
b1ec3c92 | 1506 | NULL_RTX, 1); |
091ad0b9 | 1507 | target = expand_mult (Pmode, target, |
2f1cd2eb RS |
1508 | gen_int_mode (required_align / BITS_PER_UNIT, |
1509 | Pmode), | |
b1ec3c92 | 1510 | NULL_RTX, 1); |
091ad0b9 | 1511 | } |
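/* For example, assuming REQUIRED_ALIGN is 128 bits (16 bytes), the
   three operations above compute TARGET = (TARGET + 15) / 16 * 16,
   i.e. TARGET rounded up to the next multiple of 16, without the
   overflow handling that CEIL_DIV_EXPR would require.  */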
d9b3eb63 | 1512 | |
3a42502d RH |
1513 | /* Now that we've committed to a return value, mark its alignment. */ |
1514 | mark_reg_pointer (target, required_align); | |
1515 | ||
15fc0026 | 1516 | /* Record the new stack level for nonlocal gotos. */ |
6de9cd9a DN |
1517 | if (cfun->nonlocal_goto_save_area != 0) |
1518 | update_nonlocal_goto_save_area (); | |
15fc0026 | 1519 | |
18ca7dab RK |
1520 | return target; |
1521 | } | |
1522 | \f | |
d9b3eb63 | 1523 | /* A front end may want to override GCC's stack checking by providing a |
14a774a9 RK |
1524 | run-time routine to call to check the stack, so provide a mechanism for |
1525 | calling that routine. */ | |
1526 | ||
e2500fed | 1527 | static GTY(()) rtx stack_check_libfunc; |
14a774a9 RK |
1528 | |
1529 | void | |
d477d1fe | 1530 | set_stack_check_libfunc (const char *libfunc_name) |
14a774a9 | 1531 | { |
d477d1fe SB |
1532 | gcc_assert (stack_check_libfunc == NULL_RTX); |
1533 | stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name); | |
14a774a9 RK |
1534 | } |
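/* As an illustration, a front end with its own run-time check would
   register it once during initialization, e.g. (hypothetical name):

     set_stack_check_libfunc ("__frontend_stack_check");

   after which probe_stack_range emits a call to that routine instead
   of explicit probes.  */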
1535 | \f | |
edff2491 RK |
1536 | /* Emit one stack probe at ADDRESS, an address within the stack. */ |
1537 | ||
260c8ba3 | 1538 | void |
502b8322 | 1539 | emit_stack_probe (rtx address) |
edff2491 | 1540 | { |
7b84aac0 EB |
1541 | #ifdef HAVE_probe_stack_address |
1542 | if (HAVE_probe_stack_address) | |
1543 | emit_insn (gen_probe_stack_address (address)); | |
1544 | else | |
1545 | #endif | |
1546 | { | |
1547 | rtx memref = gen_rtx_MEM (word_mode, address); | |
edff2491 | 1548 | |
7b84aac0 | 1549 | MEM_VOLATILE_P (memref) = 1; |
edff2491 | 1550 | |
7b84aac0 | 1551 | /* See if we have an insn to probe the stack. */ |
d809253a | 1552 | #ifdef HAVE_probe_stack |
7b84aac0 EB |
1553 | if (HAVE_probe_stack) |
1554 | emit_insn (gen_probe_stack (memref)); | |
1555 | else | |
d809253a | 1556 | #endif |
7b84aac0 EB |
1557 | emit_move_insn (memref, const0_rtx); |
1558 | } | |
edff2491 RK |
1559 | } |
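/* On a target providing neither probe_stack_address nor probe_stack,
   the fallback above amounts, in effect, to the C statement

     *(volatile word_type *) address = 0;

   (where word_type stands for a word_mode-sized integer): a volatile
   store that cannot be deleted or reordered away, so the access (and
   any fault it raises) is guaranteed to happen.  */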
1560 | ||
d9b3eb63 | 1561 | /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive. |
d809253a EB |
1562 | FIRST is a constant and SIZE is a Pmode RTX. These are offsets from
1563 | the current stack pointer. STACK_GROWS_DOWNWARD says whether to add | |
1564 | or subtract them from the stack pointer. */ | |
1565 | ||
1566 | #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP) | |
edff2491 RK |
1567 | |
1568 | #ifdef STACK_GROWS_DOWNWARD | |
1569 | #define STACK_GROW_OP MINUS | |
d809253a EB |
1570 | #define STACK_GROW_OPTAB sub_optab |
1571 | #define STACK_GROW_OFF(off) -(off) | |
edff2491 RK |
1572 | #else |
1573 | #define STACK_GROW_OP PLUS | |
d809253a EB |
1574 | #define STACK_GROW_OPTAB add_optab |
1575 | #define STACK_GROW_OFF(off) (off) | |
edff2491 RK |
1576 | #endif |
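/* These macros let the probing code below be written once for both
   stack directions.  On a downward-growing stack, for example,
   gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, sp, x) builds (minus sp x)
   and STACK_GROW_OFF (16) expands to -16.  */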
1577 | ||
1578 | void | |
502b8322 | 1579 | probe_stack_range (HOST_WIDE_INT first, rtx size) |
edff2491 | 1580 | { |
4b6c1672 RK |
1581 | /* First ensure SIZE is Pmode. */ |
1582 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1583 | size = convert_to_mode (Pmode, size, 1); | |
1584 | ||
d809253a EB |
1585 | /* Next see if we have a function to check the stack. */ |
1586 | if (stack_check_libfunc) | |
f5f5363f | 1587 | { |
d809253a | 1588 | rtx addr = memory_address (Pmode, |
2b3aadfc RH |
1589 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1590 | stack_pointer_rtx, | |
0a81f074 RS |
1591 | plus_constant (Pmode, |
1592 | size, first))); | |
949fa04c EB |
1593 | emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr, |
1594 | Pmode); | |
f5f5363f | 1595 | } |
14a774a9 | 1596 | |
d809253a | 1597 | /* Next see if we have an insn to check the stack. */ |
edff2491 | 1598 | #ifdef HAVE_check_stack |
d6a6a07a | 1599 | else if (HAVE_check_stack) |
edff2491 | 1600 | { |
a5c7d693 | 1601 | struct expand_operand ops[1]; |
d809253a EB |
1602 | rtx addr = memory_address (Pmode, |
1603 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1604 | stack_pointer_rtx, | |
0a81f074 RS |
1605 | plus_constant (Pmode, |
1606 | size, first))); | |
d6a6a07a | 1607 | bool success; |
a5c7d693 | 1608 | create_input_operand (&ops[0], addr, Pmode); |
d6a6a07a EB |
1609 | success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops); |
1610 | gcc_assert (success); | |
edff2491 RK |
1611 | } |
1612 | #endif | |
1613 | ||
d809253a EB |
1614 | /* Otherwise we have to generate explicit probes. If we have a constant |
1615 | small number of them to generate, that's the easy case. */ | |
1616 | else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) | |
edff2491 | 1617 | { |
d809253a EB |
1618 | HOST_WIDE_INT isize = INTVAL (size), i; |
1619 | rtx addr; | |
1620 | ||
1621 | /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until | |
1622 | it exceeds SIZE. If only one probe is needed, this will not | |
1623 | generate any code. Then probe at FIRST + SIZE. */ | |
1624 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1625 | { | |
1626 | addr = memory_address (Pmode, | |
0a81f074 | 1627 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1628 | STACK_GROW_OFF (first + i))); |
1629 | emit_stack_probe (addr); | |
1630 | } | |
1631 | ||
1632 | addr = memory_address (Pmode, | |
0a81f074 | 1633 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1634 | STACK_GROW_OFF (first + isize))); |
1635 | emit_stack_probe (addr); | |
edff2491 RK |
1636 | } |
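/* Worked example: with PROBE_INTERVAL == 4096, FIRST == 0 and
   SIZE == 10000, the loop above probes at offsets 4096 and 8192,
   and the final probe lands exactly at offset 10000.  */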
1637 | ||
d809253a EB |
1638 | /* In the variable case, do the same as above, but in a loop. Note that we |
1639 | must be extra careful with variables wrapping around because we might be | |
1640 | at the very top (or the very bottom) of the address space and we have to | |
1641 | be able to handle this case properly; in particular, we use an equality | |
1642 | test for the loop condition. */ | |
edff2491 RK |
1643 | else |
1644 | { | |
d809253a | 1645 | rtx rounded_size, rounded_size_op, test_addr, last_addr, temp; |
528a80c1 DM |
1646 | rtx_code_label *loop_lab = gen_label_rtx (); |
1647 | rtx_code_label *end_lab = gen_label_rtx (); | |
edff2491 | 1648 | |
d809253a EB |
1649 | /* Step 1: round SIZE to the previous multiple of the interval. */ |
1650 | ||
1651 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1652 | rounded_size | |
69a59f0f RS |
1653 | = simplify_gen_binary (AND, Pmode, size, |
1654 | gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
d809253a EB |
1655 | rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1656 | ||
1657 | ||
1658 | /* Step 2: compute initial and final value of the loop counter. */ | |
1659 | ||
1660 | /* TEST_ADDR = SP + FIRST. */ | |
1661 | test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1662 | stack_pointer_rtx, | |
4789c0ce RS |
1663 | gen_int_mode (first, Pmode)), |
1664 | NULL_RTX); | |
d809253a EB |
1665 | |
1666 | /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */ | |
1667 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1668 | test_addr, | |
1669 | rounded_size_op), NULL_RTX); | |
1670 | ||
1671 | ||
1672 | /* Step 3: the loop | |
1673 | ||
1674 | while (TEST_ADDR != LAST_ADDR) | |
1675 | { | |
1676 | TEST_ADDR = TEST_ADDR + PROBE_INTERVAL | |
1677 | probe at TEST_ADDR | |
1678 | } | |
1679 | ||
1680 | probes at FIRST + N * PROBE_INTERVAL for values of N from 1 | |
1681 | until it is equal to ROUNDED_SIZE. */ | |
edff2491 RK |
1682 | |
1683 | emit_label (loop_lab); | |
edff2491 | 1684 | |
d809253a EB |
1685 | /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */ |
1686 | emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1, | |
1687 | end_lab); | |
1688 | ||
1689 | /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */ | |
1690 | temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr, | |
2f1cd2eb | 1691 | gen_int_mode (PROBE_INTERVAL, Pmode), test_addr, |
edff2491 | 1692 | 1, OPTAB_WIDEN); |
edff2491 | 1693 | |
5b0264cb | 1694 | gcc_assert (temp == test_addr); |
edff2491 | 1695 | |
d809253a EB |
1696 | /* Probe at TEST_ADDR. */ |
1697 | emit_stack_probe (test_addr); | |
1698 | ||
1699 | emit_jump (loop_lab); | |
1700 | ||
edff2491 RK |
1701 | emit_label (end_lab); |
1702 | ||
d809253a EB |
1703 | |
1704 | /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time | |
1705 | that SIZE is equal to ROUNDED_SIZE. */ | |
1706 | ||
1707 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1708 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1709 | if (temp != const0_rtx) | |
1710 | { | |
1711 | rtx addr; | |
1712 | ||
32990d5b | 1713 | if (CONST_INT_P (temp)) |
d809253a EB |
1714 | { |
1715 | /* Use [base + disp] addressing mode if supported. */
1716 | HOST_WIDE_INT offset = INTVAL (temp); | |
1717 | addr = memory_address (Pmode, | |
0a81f074 | 1718 | plus_constant (Pmode, last_addr, |
d809253a EB |
1719 | STACK_GROW_OFF (offset))); |
1720 | } | |
1721 | else | |
1722 | { | |
1723 | /* Manual CSE if the difference is not known at compile-time. */ | |
1724 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1725 | addr = memory_address (Pmode, | |
1726 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1727 | last_addr, temp)); | |
1728 | } | |
1729 | ||
1730 | emit_stack_probe (addr); | |
1731 | } | |
edff2491 | 1732 | } |
eabcc725 EB |
1733 | |
1734 | /* Make sure nothing is scheduled before we are done. */ | |
1735 | emit_insn (gen_blockage ()); | |
edff2491 | 1736 | } |
d809253a | 1737 | |
c35af30f EB |
1738 | /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) |
1739 | while probing it. This pushes when SIZE is positive. SIZE need not | |
1740 | be constant. If ADJUST_BACK is true, adjust the stack pointer back
1741 | up by SIZE at the end. */
d809253a | 1742 | |
c35af30f EB |
1743 | void |
1744 | anti_adjust_stack_and_probe (rtx size, bool adjust_back) | |
d809253a | 1745 | { |
c35af30f EB |
1746 | /* We skip the probe for the first interval + a small dope of 4 words and |
1747 | probe that many bytes past the specified size to maintain a protection | |
1748 | area at the bottom of the stack. */
d809253a EB |
1749 | const int dope = 4 * UNITS_PER_WORD; |
1750 | ||
1751 | /* First ensure SIZE is Pmode. */ | |
1752 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1753 | size = convert_to_mode (Pmode, size, 1); | |
1754 | ||
1755 | /* If we have a constant small number of probes to generate, that's the | |
1756 | easy case. */ | |
32990d5b | 1757 | if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) |
d809253a EB |
1758 | { |
1759 | HOST_WIDE_INT isize = INTVAL (size), i; | |
1760 | bool first_probe = true; | |
1761 | ||
260c8ba3 | 1762 | /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1763 | values of N from 1 until it exceeds SIZE. If only one probe is |
1764 | needed, this will not generate any code. Then adjust and probe | |
1765 | to PROBE_INTERVAL + SIZE. */ | |
1766 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1767 | { | |
1768 | if (first_probe) | |
1769 | { | |
1770 | anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope)); | |
1771 | first_probe = false; | |
1772 | } | |
1773 | else | |
1774 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1775 | emit_stack_probe (stack_pointer_rtx); | |
1776 | } | |
1777 | ||
1778 | if (first_probe) | |
0a81f074 | 1779 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
d809253a | 1780 | else |
0a81f074 | 1781 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i)); |
d809253a EB |
1782 | emit_stack_probe (stack_pointer_rtx); |
1783 | } | |
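/* Worked example, assuming PROBE_INTERVAL == 4096 and 8-byte words
   (so DOPE == 32): for SIZE == 10000, the code above adjusts by 8224
   and probes, adjusts by 4096 and probes, then adjusts by the
   remaining 10000 + 4096 - 12288 == 1808 and probes, for a total
   adjustment of SIZE + PROBE_INTERVAL + DOPE bytes.  */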
1784 | ||
1785 | /* In the variable case, do the same as above, but in a loop. Note that we | |
1786 | must be extra careful with variables wrapping around because we might be | |
1787 | at the very top (or the very bottom) of the address space and we have to | |
1788 | be able to handle this case properly; in particular, we use an equality | |
1789 | test for the loop condition. */ | |
1790 | else | |
1791 | { | |
1792 | rtx rounded_size, rounded_size_op, last_addr, temp; | |
528a80c1 DM |
1793 | rtx_code_label *loop_lab = gen_label_rtx (); |
1794 | rtx_code_label *end_lab = gen_label_rtx (); | |
d809253a EB |
1795 | |
1796 | ||
1797 | /* Step 1: round SIZE to the previous multiple of the interval. */ | |
1798 | ||
1799 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1800 | rounded_size | |
69a59f0f RS |
1801 | = simplify_gen_binary (AND, Pmode, size, |
1802 | gen_int_mode (-PROBE_INTERVAL, Pmode)); | |
d809253a EB |
1803 | rounded_size_op = force_operand (rounded_size, NULL_RTX); |
1804 | ||
1805 | ||
1806 | /* Step 2: compute initial and final value of the loop counter. */ | |
1807 | ||
1808 | /* SP = SP_0 + PROBE_INTERVAL. */ | |
1809 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
1810 | ||
1811 | /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */ | |
1812 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1813 | stack_pointer_rtx, | |
1814 | rounded_size_op), NULL_RTX); | |
1815 | ||
1816 | ||
1817 | /* Step 3: the loop | |
1818 | ||
260c8ba3 EB |
1819 | while (SP != LAST_ADDR) |
1820 | { | |
1821 | SP = SP + PROBE_INTERVAL | |
1822 | probe at SP | |
1823 | } | |
d809253a | 1824 | |
260c8ba3 | 1825 | adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1826 | values of N from 1 until it is equal to ROUNDED_SIZE. */ |
1827 | ||
1828 | emit_label (loop_lab); | |
1829 | ||
1830 | /* Jump to END_LAB if SP == LAST_ADDR. */ | |
1831 | emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX, | |
1832 | Pmode, 1, end_lab); | |
1833 | ||
1834 | /* SP = SP + PROBE_INTERVAL and probe at SP. */ | |
1835 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1836 | emit_stack_probe (stack_pointer_rtx); | |
1837 | ||
1838 | emit_jump (loop_lab); | |
1839 | ||
1840 | emit_label (end_lab); | |
1841 | ||
1842 | ||
260c8ba3 | 1843 | /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot |
d809253a EB |
1844 | assert at compile-time that SIZE is equal to ROUNDED_SIZE. */ |
1845 | ||
1846 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1847 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1848 | if (temp != const0_rtx) | |
1849 | { | |
1850 | /* Manual CSE if the difference is not known at compile-time. */ | |
1851 | if (!CONST_INT_P (temp))
1852 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1853 | anti_adjust_stack (temp); | |
1854 | emit_stack_probe (stack_pointer_rtx); | |
1855 | } | |
1856 | } | |
1857 | ||
c35af30f EB |
1858 | /* Adjust back and account for the additional first interval. */ |
1859 | if (adjust_back) | |
0a81f074 | 1860 | adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
c35af30f EB |
1861 | else |
1862 | adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
d809253a EB |
1863 | } |
1864 | ||
18ca7dab RK |
1865 | /* Return an rtx representing the register or memory location |
1866 | in which a scalar value of data type VALTYPE | |
1867 | was returned by a function call to function FUNC. | |
1d636cc6 RG |
1868 | FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise |
1869 | function is known, otherwise 0. | |
4dc07bd7 JJ |
1870 | OUTGOING is 1 if, on a machine with register windows, this function
1871 | should return the register in which the function will put its result,
30f7a378 | 1872 | and 0 otherwise. */ |
18ca7dab RK |
1873 | |
1874 | rtx | |
586de218 | 1875 | hard_function_value (const_tree valtype, const_tree func, const_tree fntype, |
502b8322 | 1876 | int outgoing ATTRIBUTE_UNUSED) |
18ca7dab | 1877 | { |
4dc07bd7 | 1878 | rtx val; |
770ae6cc | 1879 | |
1d636cc6 | 1880 | val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing); |
770ae6cc | 1881 | |
f8cfc6aa | 1882 | if (REG_P (val) |
e1a4071f JL |
1883 | && GET_MODE (val) == BLKmode) |
1884 | { | |
770ae6cc | 1885 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype); |
ef4bddc2 | 1886 | machine_mode tmpmode; |
770ae6cc | 1887 | |
d9b3eb63 | 1888 | /* int_size_in_bytes can return -1. We don't need a check here |
535a42b1 NS |
1889 | since the value of bytes will then be large enough that no |
1890 | mode will match anyway. */ | |
d9b3eb63 | 1891 | |
e1a4071f | 1892 | for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
0fb7aeda KH |
1893 | tmpmode != VOIDmode; |
1894 | tmpmode = GET_MODE_WIDER_MODE (tmpmode)) | |
1895 | { | |
1896 | /* Have we found a large enough mode? */ | |
1897 | if (GET_MODE_SIZE (tmpmode) >= bytes) | |
1898 | break; | |
1899 | } | |
e1a4071f JL |
1900 | |
1901 | /* No suitable mode found. */ | |
5b0264cb | 1902 | gcc_assert (tmpmode != VOIDmode); |
e1a4071f JL |
1903 | |
1904 | PUT_MODE (val, tmpmode); | |
d9b3eb63 | 1905 | } |
e1a4071f | 1906 | return val; |
18ca7dab RK |
1907 | } |
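/* For instance, a 6-byte BLKmode value returned in a register would
   be given the narrowest integer mode of at least 6 bytes by the loop
   above, i.e. DImode on typical targets.  */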
1908 | ||
1909 | /* Return an rtx representing the register or memory location | |
1910 | in which a scalar value of mode MODE was returned by a library call. */ | |
1911 | ||
1912 | rtx | |
ef4bddc2 | 1913 | hard_libcall_value (machine_mode mode, rtx fun) |
18ca7dab | 1914 | { |
390b17c2 | 1915 | return targetm.calls.libcall_value (mode, fun); |
18ca7dab | 1916 | } |
0c5e217d RS |
1917 | |
1918 | /* Look up the tree code for a given rtx code | |
1919 | to provide the arithmetic operation for REAL_ARITHMETIC. | |
1920 | The function returns an int because the caller may not know | |
1921 | what `enum tree_code' means. */ | |
1922 | ||
1923 | int | |
502b8322 | 1924 | rtx_to_tree_code (enum rtx_code code) |
0c5e217d RS |
1925 | { |
1926 | enum tree_code tcode; | |
1927 | ||
1928 | switch (code) | |
1929 | { | |
1930 | case PLUS: | |
1931 | tcode = PLUS_EXPR; | |
1932 | break; | |
1933 | case MINUS: | |
1934 | tcode = MINUS_EXPR; | |
1935 | break; | |
1936 | case MULT: | |
1937 | tcode = MULT_EXPR; | |
1938 | break; | |
1939 | case DIV: | |
1940 | tcode = RDIV_EXPR; | |
1941 | break; | |
1942 | case SMIN: | |
1943 | tcode = MIN_EXPR; | |
1944 | break; | |
1945 | case SMAX: | |
1946 | tcode = MAX_EXPR; | |
1947 | break; | |
1948 | default: | |
1949 | tcode = LAST_AND_UNUSED_TREE_CODE; | |
1950 | break; | |
1951 | } | |
1952 | return ((int) tcode); | |
1953 | } | |
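/* Example: rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while a
   code with no real-arithmetic counterpart, such as ASHIFT, returns
   (int) LAST_AND_UNUSED_TREE_CODE.  */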
e2500fed GK |
1954 | |
1955 | #include "gt-explow.h" |