/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }
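
  /* A worked example of the masking trick above (illustrative): for an
     8-bit mode and c == 0x2ff, sign == 0x80, so
       c &= 0xff  ->  0xff
       c ^= 0x80  ->  0x7f
       c -= 0x80  ->  -1
     i.e. the low 8 bits of C, sign-extended to a full HOST_WIDE_INT.  */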

  return c;
}

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  */
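
/* For example (illustrative; assumes a host where HOST_WIDE_INT is at
   least 32 bits wide):
     plus_constant (SImode, gen_rtx_REG (SImode, 1), 4)
   yields (plus:SI (reg:SI 1) (const_int 4)), while
     plus_constant (SImode, GEN_INT (8), 4)
   folds directly to (const_int 12).  */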

rtx
plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT l1 = INTVAL (x);
          HOST_WIDE_INT h1 = (l1 >> (HOST_BITS_PER_WIDE_INT - 1)) ? -1 : 0;
          unsigned HOST_WIDE_INT l2 = c;
          HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
          unsigned HOST_WIDE_INT lv;
          HOST_WIDE_INT hv;

          if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
            gcc_unreachable ();

          return immed_double_const (lv, hv, VOIDmode);
        }

      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? -1 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        if (add_double_with_sign (l1, h1, l2, h2, &lv, &hv, false))
          /* Sorry, we have no way to represent overflows this wide.
             To fix, add constant support wider than CONST_DOUBLE.  */
          gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0),
                            plus_constant (mode, XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (mode, *const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
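
/* For example (illustrative): with X = (plus (plus (reg) (const_int 4))
   (const_int 5)) and *CONSTPTR = const0_rtx, the function below returns
   (reg) and leaves (const_int 9) in *CONSTPTR.  */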

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
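
/* E.g. (illustrative): for an object of a 4-byte integer type this returns
   4; for a variable-length array whose size is not known at compile time it
   returns -1.  */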

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
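
/* Example (illustrative): (plus (mem:SI (reg)) (const_int 4)) becomes
   (plus (reg:SI N) (const_int 4)), after a move from the MEM into the
   fresh pseudo N has been emitted; the VOIDmode CONST_INT is left alone.  */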

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */
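
/* Illustrative example, assuming a target whose pointer mode is SImode and
   whose address mode is DImode with POINTERS_EXTEND_UNSIGNED == 1:
   converting (plus:SI (reg:SI) (const_int 4)) to DImode yields
   (plus:DI (zero_extend:DI (reg:SI)) (const_int 4)); the PLUS case below
   commutes the extension into the operands.  */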

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* FIXME: For addition, we used to permute the conversion and
         addition operation only if one operand is a constant and
         converting the constant does not change it or if one operand
         is a constant and we are using a ptr_extend instruction
         (POINTERS_EXTEND_UNSIGNED < 0) even if the resulting address
         may overflow/underflow.  We relax the condition to include
         zero-extend (POINTERS_EXTEND_UNSIGNED > 0) since the other
         parts of the compiler depend on it.  See PR 49721.

         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (POINTERS_EXTEND_UNSIGNED != 0
                  || XEXP (x, 1) == convert_memory_address_addr_space
                                      (to_mode, XEXP (x, 1), as))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
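      /* E.g. (illustrative): for X = (plus (plus (reg) (reg)) (const_int 20)),
         the constant-free sum is copied into a fresh register R and the
         address becomes (plus R (const_int 20)), provided both forms are
         valid addresses.  */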
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */
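
/* For instance (illustrative): a MEM of (const (plus (symbol_ref "x")
   (const_int 12))) within an anchored object block may be rewritten in
   terms of the block's anchor symbol, giving (plus ANCHOR (const_int D))
   where D is the distance of x + 12 from the anchor.  */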

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  enum machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}


/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp, insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */
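
/* E.g. (illustrative): on a STACK_GROWS_DOWNWARD target,
   adjust_stack (GEN_INT (16)) emits SP := SP + 16, popping 16 bytes,
   and decreases stack_pointer_delta by 16.  */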

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
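
/* For example (illustrative): with a preferred stack boundary of 64 bits
   (ALIGN == 8 bytes), a constant SIZE of 13 rounds to
   (13 + 8 - 1) / 8 * 8 == 16; the non-constant path computes the same
   add/divide/multiply sequence in RTL.  */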
956 | ||
4dd9b044 | 957 | static rtx |
502b8322 | 958 | round_push (rtx size) |
18ca7dab | 959 | { |
32990d5b | 960 | rtx align_rtx, alignm1_rtx; |
41ee3204 | 961 | |
32990d5b JJ |
962 | if (!SUPPORTS_STACK_ALIGNMENT |
963 | || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT) | |
18ca7dab | 964 | { |
32990d5b JJ |
965 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
966 | ||
967 | if (align == 1) | |
968 | return size; | |
969 | ||
970 | if (CONST_INT_P (size)) | |
971 | { | |
972 | HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align; | |
41ee3204 | 973 | |
32990d5b JJ |
974 | if (INTVAL (size) != new_size) |
975 | size = GEN_INT (new_size); | |
976 | return size; | |
977 | } | |
978 | ||
979 | align_rtx = GEN_INT (align); | |
980 | alignm1_rtx = GEN_INT (align - 1); | |
18ca7dab RK |
981 | } |
982 | else | |
983 | { | |
32990d5b JJ |
984 | /* If crtl->preferred_stack_boundary might still grow, use |
985 | virtual_preferred_stack_boundary_rtx instead. This will be | |
986 | substituted by the right value in vregs pass and optimized | |
987 | during combine. */ | |
988 | align_rtx = virtual_preferred_stack_boundary_rtx; | |
0a81f074 RS |
989 | alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1), |
990 | NULL_RTX); | |
18ca7dab | 991 | } |
41ee3204 | 992 | |
32990d5b JJ |
993 | /* CEIL_DIV_EXPR needs to worry about the addition overflowing, |
994 | but we know it can't. So add ourselves and then do | |
995 | TRUNC_DIV_EXPR. */ | |
996 | size = expand_binop (Pmode, add_optab, size, alignm1_rtx, | |
997 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
998 | size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx, | |
999 | NULL_RTX, 1); | |
1000 | size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1); | |
1001 | ||
18ca7dab RK |
1002 | return size; |
1003 | } | |
1004 | \f | |
59257ff7 RK |
1005 | /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer |
1006 | to a previously-created save area. If no save area has been allocated, | |
1007 | this function will allocate one. If a save area is specified, it | |
9eac0f2a | 1008 | must be of the proper mode. */ |
59257ff7 RK |
1009 | |
1010 | void | |
9eac0f2a | 1011 | emit_stack_save (enum save_level save_level, rtx *psave) |
59257ff7 RK |
1012 | { |
1013 | rtx sa = *psave; | |
1014 | /* The default is that we use a move insn and save in a Pmode object. */ | |
502b8322 | 1015 | rtx (*fcn) (rtx, rtx) = gen_move_insn; |
a260abc9 | 1016 | enum machine_mode mode = STACK_SAVEAREA_MODE (save_level); |
59257ff7 RK |
1017 | |
1018 | /* See if this machine has anything special to do for this kind of save. */ | |
1019 | switch (save_level) | |
1020 | { | |
1021 | #ifdef HAVE_save_stack_block | |
1022 | case SAVE_BLOCK: | |
1023 | if (HAVE_save_stack_block) | |
a260abc9 | 1024 | fcn = gen_save_stack_block; |
59257ff7 RK |
1025 | break; |
1026 | #endif | |
1027 | #ifdef HAVE_save_stack_function | |
1028 | case SAVE_FUNCTION: | |
1029 | if (HAVE_save_stack_function) | |
a260abc9 | 1030 | fcn = gen_save_stack_function; |
59257ff7 RK |
1031 | break; |
1032 | #endif | |
1033 | #ifdef HAVE_save_stack_nonlocal | |
1034 | case SAVE_NONLOCAL: | |
1035 | if (HAVE_save_stack_nonlocal) | |
a260abc9 | 1036 | fcn = gen_save_stack_nonlocal; |
59257ff7 RK |
1037 | break; |
1038 | #endif | |
38a448ca RH |
1039 | default: |
1040 | break; | |
59257ff7 RK |
1041 | } |
1042 | ||
1043 | /* If there is no save area and we have to allocate one, do so. Otherwise | |
1044 | verify the save area is the proper mode. */ | |
1045 | ||
1046 | if (sa == 0) | |
1047 | { | |
1048 | if (mode != VOIDmode) | |
1049 | { | |
1050 | if (save_level == SAVE_NONLOCAL) | |
1051 | *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0); | |
1052 | else | |
1053 | *psave = sa = gen_reg_rtx (mode); | |
1054 | } | |
1055 | } | |
59257ff7 | 1056 | |
9eac0f2a RH |
1057 | do_pending_stack_adjust (); |
1058 | if (sa != 0) | |
1059 | sa = validize_mem (sa); | |
1060 | emit_insn (fcn (sa, stack_pointer_rtx)); | |
59257ff7 RK |
1061 | } |
1062 | ||
1063 | /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save | |
9eac0f2a | 1064 | area made by emit_stack_save. If it is zero, we have nothing to do. */ |
59257ff7 RK |
1065 | |
1066 | void | |
9eac0f2a | 1067 | emit_stack_restore (enum save_level save_level, rtx sa) |
59257ff7 RK |
1068 | { |
1069 | /* The default is that we use a move insn. */ | |
502b8322 | 1070 | rtx (*fcn) (rtx, rtx) = gen_move_insn; |
59257ff7 | 1071 | |
50025f91 TV |
1072 | /* If stack_realign_drap, the x86 backend emits a prologue that aligns both |
1073 | STACK_POINTER and HARD_FRAME_POINTER. | |
1074 | If stack_realign_fp, the x86 backend emits a prologue that aligns only | |
1075 | STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing | |
1076 | aligned variables, which is reflected in ix86_can_eliminate. | |
1077 | We normally still have the realigned STACK_POINTER that we can use. | |
1078 | But if there is a stack restore still present at reload, it can trigger | |
1079 | mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate | |
1080 | FRAME_POINTER into a hard reg. | |
1081 | To prevent this situation, we force need_drap if we emit a stack | |
1082 | restore. */ | |
1083 | if (SUPPORTS_STACK_ALIGNMENT) | |
1084 | crtl->need_drap = true; | |
1085 | ||
59257ff7 RK |
1086 | /* See if this machine has anything special to do for this kind of save. */ |
1087 | switch (save_level) | |
1088 | { | |
1089 | #ifdef HAVE_restore_stack_block | |
1090 | case SAVE_BLOCK: | |
1091 | if (HAVE_restore_stack_block) | |
1092 | fcn = gen_restore_stack_block; | |
1093 | break; | |
1094 | #endif | |
1095 | #ifdef HAVE_restore_stack_function | |
1096 | case SAVE_FUNCTION: | |
1097 | if (HAVE_restore_stack_function) | |
1098 | fcn = gen_restore_stack_function; | |
1099 | break; | |
1100 | #endif | |
1101 | #ifdef HAVE_restore_stack_nonlocal | |
59257ff7 RK |
1102 | case SAVE_NONLOCAL: |
1103 | if (HAVE_restore_stack_nonlocal) | |
1104 | fcn = gen_restore_stack_nonlocal; | |
1105 | break; | |
1106 | #endif | |
38a448ca RH |
1107 | default: |
1108 | break; | |
59257ff7 RK |
1109 | } |
1110 | ||
d072107f | 1111 | if (sa != 0) |
260f91c2 DJ |
1112 | { |
1113 | sa = validize_mem (sa); | |
1114 | /* These clobbers prevent the scheduler from moving | |
1115 | references to variable arrays below the code | |
4b7e68e7 | 1116 | that deletes (pops) the arrays. */ |
c41c1387 RS |
1117 | emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))); |
1118 | emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx)); | |
260f91c2 | 1119 | } |
d072107f | 1120 | |
a494ed43 EB |
1121 | discard_pending_stack_adjust (); |
1122 | ||
9eac0f2a | 1123 | emit_insn (fcn (stack_pointer_rtx, sa)); |
59257ff7 | 1124 | } |
6de9cd9a DN |
1125 | |
1126 | /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current | |
1127 | function. This function should be called whenever we allocate or | |
1128 | deallocate dynamic stack space. */ | |
1129 | ||
1130 | void | |
1131 | update_nonlocal_goto_save_area (void) | |
1132 | { | |
1133 | tree t_save; | |
1134 | rtx r_save; | |
1135 | ||
1136 | /* The nonlocal_goto_save_area object is an array of N pointers. The | |
1137 | first one is used for the frame pointer save; the rest are sized by | |
1138 | STACK_SAVEAREA_MODE. Create a reference to array index 1, the first | |
1139 | of the stack save area slots. */ | |
6bbec3e1 L |
1140 | t_save = build4 (ARRAY_REF, |
1141 | TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)), | |
1142 | cfun->nonlocal_goto_save_area, | |
3244e67d | 1143 | integer_one_node, NULL_TREE, NULL_TREE); |
6de9cd9a DN |
1144 | r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE); |
1145 | ||
9eac0f2a | 1146 | emit_stack_save (SAVE_NONLOCAL, &r_save); |
6de9cd9a | 1147 | } |
59257ff7 | 1148 | \f |
18ca7dab | 1149 | /* Return an rtx representing the address of an area of memory dynamically |
3a42502d | 1150 | pushed on the stack. |
18ca7dab RK |
1151 | |
1152 | Any required stack pointer alignment is preserved. | |
1153 | ||
1154 | SIZE is an rtx representing the size of the area. | |
091ad0b9 | 1155 | |
3a42502d RH |
1156 | SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This |
1157 | parameter may be zero. If so, a proper value will be extracted | |
1158 | from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed. | |
1159 | ||
1160 | REQUIRED_ALIGN is the alignment (in bits) required for the region | |
1161 | of memory. | |
d3c12306 EB |
1162 | |
1163 | If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the | |
1164 | stack space allocated by the generated code cannot be added with itself | |
1165 | in the course of the execution of the function. It is always safe to | |
1166 | pass FALSE here and the following criterion is sufficient in order to | |
1167 | pass TRUE: every path in the CFG that starts at the allocation point and | |
1168 | loops to it executes the associated deallocation code. */ | |
18ca7dab RK |
1169 | |
1170 | rtx | |
3a42502d RH |
1171 | allocate_dynamic_stack_space (rtx size, unsigned size_align, |
1172 | unsigned required_align, bool cannot_accumulate) | |
18ca7dab | 1173 | { |
d3c12306 | 1174 | HOST_WIDE_INT stack_usage_size = -1; |
3a42502d RH |
1175 | rtx final_label, final_target, target; |
1176 | bool must_align; | |
d3c12306 | 1177 | |
15fc0026 | 1178 | /* If we're asking for zero bytes, it doesn't matter what we point |
9faa82d8 | 1179 | to since we can't dereference it. But return a reasonable |
15fc0026 RK |
1180 | address anyway. */ |
1181 | if (size == const0_rtx) | |
1182 | return virtual_stack_dynamic_rtx; | |
1183 | ||
1184 | /* Otherwise, show we're calling alloca or equivalent. */ | |
e3b5732b | 1185 | cfun->calls_alloca = 1; |
15fc0026 | 1186 | |
d3c12306 EB |
1187 | /* If stack usage info is requested, look into the size we are passed. |
1188 | We need to do so this early to avoid the obfuscation that may be | |
1189 | introduced later by the various alignment operations. */ | |
a11e0df4 | 1190 | if (flag_stack_usage_info) |
d3c12306 | 1191 | { |
32990d5b | 1192 | if (CONST_INT_P (size)) |
d3c12306 | 1193 | stack_usage_size = INTVAL (size); |
32990d5b | 1194 | else if (REG_P (size)) |
d3c12306 EB |
1195 | { |
1196 | /* Look into the last emitted insn and see if we can deduce | |
1197 | something for the register. */ | |
1198 | rtx insn, set, note; | |
1199 | insn = get_last_insn (); | |
1200 | if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size)) | |
1201 | { | |
32990d5b | 1202 | if (CONST_INT_P (SET_SRC (set))) |
d3c12306 EB |
1203 | stack_usage_size = INTVAL (SET_SRC (set)); |
1204 | else if ((note = find_reg_equal_equiv_note (insn)) | |
32990d5b | 1205 | && CONST_INT_P (XEXP (note, 0))) |
d3c12306 EB |
1206 | stack_usage_size = INTVAL (XEXP (note, 0)); |
1207 | } | |
1208 | } | |
1209 | ||
1210 | /* If the size is not constant, we can't say anything. */ | |
1211 | if (stack_usage_size == -1) | |
1212 | { | |
1213 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1214 | stack_usage_size = 0; | |
1215 | } | |
1216 | } | |
1217 | ||
18ca7dab RK |
1218 | /* Ensure the size is in the proper mode. */ |
1219 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1220 | size = convert_to_mode (Pmode, size, 1); | |
1221 | ||
3a42502d RH |
1222 | /* Adjust SIZE_ALIGN, if needed. */ |
1223 | if (CONST_INT_P (size)) | |
1224 | { | |
1225 | unsigned HOST_WIDE_INT lsb; | |
1226 | ||
1227 | lsb = INTVAL (size); | |
1228 | lsb &= -lsb; | |
1229 | ||
1230 | /* Watch out for overflow truncating to "unsigned". */ | |
1231 | if (lsb > UINT_MAX / BITS_PER_UNIT) | |
1232 | size_align = 1u << (HOST_BITS_PER_INT - 1); | |
1233 | else | |
1234 | size_align = (unsigned)lsb * BITS_PER_UNIT; | |
1235 | } | |
1236 | else if (size_align < BITS_PER_UNIT) | |
1237 | size_align = BITS_PER_UNIT; | |
1238 | ||
18ca7dab | 1239 | /* We will need to ensure that the address we return is aligned to |
70ce4a25 RH |
1240 | REQUIRED_ALIGN. If that alignment is no larger than |
1241 | PREFERRED_STACK_BOUNDARY, we can handle everything without an | |
1242 | explicit alignment. */ | |
1243 | if (required_align <= PREFERRED_STACK_BOUNDARY) | |
d3c12306 | 1244 | { |
70ce4a25 RH |
1245 | if (crtl->preferred_stack_boundary < required_align) |
1246 | crtl->preferred_stack_boundary = required_align; | |
1247 | if (crtl->max_dynamic_stack_alignment < required_align) | |
1248 | crtl->max_dynamic_stack_alignment = required_align; | |
1249 | must_align = false; | |
1ecad98e | 1250 | } |
70ce4a25 RH |
1251 | else |
1252 | { | |
1253 | unsigned extra, extra_align; | |
1ecad98e | 1254 | |
70ce4a25 RH |
1255 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
1256 | crtl->max_dynamic_stack_alignment = PREFERRED_STACK_BOUNDARY; | |
1ecad98e | 1257 | |
70ce4a25 RH |
1258 | extra_align = PREFERRED_STACK_BOUNDARY; |
1259 | extra = (required_align - extra_align) / BITS_PER_UNIT; | |
3a42502d | 1260 | |
0a81f074 | 1261 | size = plus_constant (Pmode, size, extra); |
3a42502d | 1262 | size = force_operand (size, NULL_RTX); |
d3c12306 | 1263 | |
a11e0df4 | 1264 | if (flag_stack_usage_info) |
3a42502d | 1265 | stack_usage_size += extra; |
3a42502d RH |
1266 | if (extra && size_align > extra_align) |
1267 | size_align = extra_align; | |
70ce4a25 | 1268 | must_align = true; |
d3c12306 | 1269 | } |
1d9d04f8 | 1270 | |
18ca7dab | 1271 | /* Round the size to a multiple of the required stack alignment. |
70ce4a25 | 1272 | Since the stack is presumed to be rounded before this allocation, |
18ca7dab RK |
1273 | this will maintain the required alignment. |
1274 | ||
1275 | If the stack grows downward, we could save an insn by subtracting | |
1276 | SIZE from the stack pointer and then aligning the stack pointer. | |
1277 | The problem with this is that the stack pointer may be unaligned | |
1278 | between the execution of the subtraction and alignment insns and | |
1279 | some machines do not allow this. Even on those that do, some | |
1280 | signal handlers malfunction if a signal should occur between those | |
1281 | insns. Since this is an extremely rare event, we have no reliable | |
1282 | way of knowing which systems have this problem. So we avoid even | |
1283 | momentarily mis-aligning the stack. */ | |
3a42502d | 1284 | if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) |
d3c12306 EB |
1285 | { |
1286 | size = round_push (size); | |
18ca7dab | 1287 | |
a11e0df4 | 1288 | if (flag_stack_usage_info) |
d3c12306 | 1289 | { |
32990d5b | 1290 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
d3c12306 EB |
1291 | stack_usage_size = (stack_usage_size + align - 1) / align * align; |
1292 | } | |
1293 | } | |
1294 | ||
3a42502d | 1295 | target = gen_reg_rtx (Pmode); |
7458026b | 1296 | |
d3c12306 EB |
1297 | /* The size is supposed to be fully adjusted at this point so record it |
1298 | if stack usage info is requested. */ | |
a11e0df4 | 1299 | if (flag_stack_usage_info) |
d3c12306 EB |
1300 | { |
1301 | current_function_dynamic_stack_size += stack_usage_size; | |
1302 | ||
1303 | /* ??? This is gross but the only safe stance in the absence | |
1304 | of stack usage oriented flow analysis. */ | |
1305 | if (!cannot_accumulate) | |
1306 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1307 | } | |
18ca7dab | 1308 | |
7458026b ILT |
1309 | final_label = NULL_RTX; |
1310 | final_target = NULL_RTX; | |
1311 | ||
1312 | /* If we are splitting the stack, we need to ask the backend whether | |
1313 | there is enough room on the current stack. If there isn't, or if | |
1314 | the backend doesn't know how to tell is, then we need to call a | |
1315 | function to allocate memory in some other way. This memory will | |
1316 | be released when we release the current stack segment. The | |
1317 | effect is that stack allocation becomes less efficient, but at | |
1318 | least it doesn't cause a stack overflow. */ | |
1319 | if (flag_split_stack) | |
1320 | { | |
c3928dde | 1321 | rtx available_label, ask, space, func; |
7458026b ILT |
1322 | |
1323 | available_label = NULL_RTX; | |
1324 | ||
1325 | #ifdef HAVE_split_stack_space_check | |
1326 | if (HAVE_split_stack_space_check) | |
1327 | { | |
1328 | available_label = gen_label_rtx (); | |
1329 | ||
1330 | /* This instruction will branch to AVAILABLE_LABEL if there | |
1331 | are SIZE bytes available on the stack. */ | |
1332 | emit_insn (gen_split_stack_space_check (size, available_label)); | |
1333 | } | |
1334 | #endif | |
1335 | ||
c3928dde | 1336 | /* The __morestack_allocate_stack_space function will allocate |
c070a3b9 ILT |
1337 | memory using malloc. If the alignment of the memory returned |
1338 | by malloc does not meet REQUIRED_ALIGN, we increase SIZE to | |
1339 | make sure we allocate enough space. */ | |
1340 | if (MALLOC_ABI_ALIGNMENT >= required_align) | |
1341 | ask = size; | |
1342 | else | |
1343 | { | |
1344 | ask = expand_binop (Pmode, add_optab, size, | |
1345 | GEN_INT (required_align / BITS_PER_UNIT - 1), | |
1346 | NULL_RTX, 1, OPTAB_LIB_WIDEN); | |
1347 | must_align = true; | |
1348 | } | |
c3928dde | 1349 | |
7458026b ILT |
1350 | func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1351 | ||
1352 | space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | |
c3928dde | 1353 | 1, ask, Pmode); |
7458026b ILT |
1354 | |
1355 | if (available_label == NULL_RTX) | |
1356 | return space; | |
1357 | ||
1358 | final_target = gen_reg_rtx (Pmode); | |
7458026b ILT |
1359 | |
1360 | emit_move_insn (final_target, space); | |
1361 | ||
1362 | final_label = gen_label_rtx (); | |
1363 | emit_jump (final_label); | |
1364 | ||
1365 | emit_label (available_label); | |
1366 | } | |
1367 | ||
18ca7dab RK |
1368 | do_pending_stack_adjust (); |
1369 | ||
1503a7ec | 1370 | /* We ought to be called always on the toplevel and stack ought to be aligned |
a1f300c0 | 1371 | properly. */ |
5b0264cb NS |
1372 | gcc_assert (!(stack_pointer_delta |
1373 | % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | |
1503a7ec | 1374 | |
d809253a EB |
1375 | /* If needed, check that we have the required amount of stack. Take into |
1376 | account what has already been checked. */ | |
1377 | if (STACK_CHECK_MOVING_SP) | |
1378 | ; | |
1379 | else if (flag_stack_check == GENERIC_STACK_CHECK) | |
b38f3813 EB |
1380 | probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE, |
1381 | size); | |
1382 | else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK) | |
1383 | probe_stack_range (STACK_CHECK_PROTECT, size); | |
edff2491 | 1384 | |
efec771a RH |
1385 | /* Don't let anti_adjust_stack emit notes. */ |
1386 | suppress_reg_args_size = true; | |
1387 | ||
18ca7dab RK |
1388 | /* Perform the required allocation from the stack. Some systems do |
1389 | this differently than simply incrementing/decrementing from the | |
38a448ca | 1390 | stack pointer, such as acquiring the space by calling malloc(). */ |
18ca7dab RK |
1391 | #ifdef HAVE_allocate_stack |
1392 | if (HAVE_allocate_stack) | |
1393 | { | |
a5c7d693 | 1394 | struct expand_operand ops[2]; |
4b6c1672 RK |
1395 | /* We don't have to check against the predicate for operand 0 since |
1396 | TARGET is known to be a pseudo of the proper mode, which must | |
a5c7d693 RS |
1397 | be valid for the operand. */ |
1398 | create_fixed_operand (&ops[0], target); | |
1399 | create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true); | |
1400 | expand_insn (CODE_FOR_allocate_stack, 2, ops); | |
18ca7dab RK |
1401 | } |
1402 | else | |
1403 | #endif | |
ea534b63 | 1404 | { |
32990d5b JJ |
1405 | int saved_stack_pointer_delta; |
1406 | ||
38a448ca RH |
1407 | #ifndef STACK_GROWS_DOWNWARD |
1408 | emit_move_insn (target, virtual_stack_dynamic_rtx); | |
1409 | #endif | |
a157febd GK |
1410 | |
1411 | /* Check stack bounds if necessary. */ | |
e3b5732b | 1412 | if (crtl->limit_stack) |
a157febd GK |
1413 | { |
1414 | rtx available; | |
1415 | rtx space_available = gen_label_rtx (); | |
1416 | #ifdef STACK_GROWS_DOWNWARD | |
d9b3eb63 | 1417 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1418 | stack_pointer_rtx, stack_limit_rtx, |
1419 | NULL_RTX, 1, OPTAB_WIDEN); | |
1420 | #else | |
d9b3eb63 | 1421 | available = expand_binop (Pmode, sub_optab, |
a157febd GK |
1422 | stack_limit_rtx, stack_pointer_rtx, |
1423 | NULL_RTX, 1, OPTAB_WIDEN); | |
1424 | #endif | |
1425 | emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1, | |
a06ef755 | 1426 | space_available); |
a157febd GK |
1427 | #ifdef HAVE_trap |
1428 | if (HAVE_trap) | |
1429 | emit_insn (gen_trap ()); | |
1430 | else | |
1431 | #endif | |
1432 | error ("stack limits not supported on this target"); | |
1433 | emit_barrier (); | |
1434 | emit_label (space_available); | |
1435 | } | |
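/* The sequence just emitted is roughly equivalent to this C sketch,
   assuming a downward-growing stack (stack_limit below the stack
   pointer):

     uintptr_t available = (uintptr_t) stack_pointer
                           - (uintptr_t) stack_limit;
     if (!(available >= size))   /* unsigned (GEU) comparison */
       trap ();

   On targets without a trap instruction, the expander instead reports
   "stack limits not supported on this target" at compile time.  */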
1436 | ||
32990d5b | 1437 | saved_stack_pointer_delta = stack_pointer_delta; |
9a08d230 | 1438 | |
d809253a | 1439 | if (flag_stack_check && STACK_CHECK_MOVING_SP) |
c35af30f | 1440 | anti_adjust_stack_and_probe (size, false); |
d809253a EB |
1441 | else |
1442 | anti_adjust_stack (size); | |
9a08d230 | 1443 | |
32990d5b JJ |
1444 | /* Even if size is constant, don't modify stack_pointer_delta. |
1445 | The constant size alloca should preserve | |
1446 | crtl->preferred_stack_boundary alignment. */ | |
1447 | stack_pointer_delta = saved_stack_pointer_delta; | |
d5457140 | 1448 | |
18ca7dab | 1449 | #ifdef STACK_GROWS_DOWNWARD |
ca56cd30 | 1450 | emit_move_insn (target, virtual_stack_dynamic_rtx); |
18ca7dab | 1451 | #endif |
38a448ca | 1452 | } |
18ca7dab | 1453 | |
efec771a RH |
1454 | suppress_reg_args_size = false; |
1455 | ||
3a42502d RH |
1456 | /* Finish up the split stack handling. */ |
1457 | if (final_label != NULL_RTX) | |
1458 | { | |
1459 | gcc_assert (flag_split_stack); | |
1460 | emit_move_insn (final_target, target); | |
1461 | emit_label (final_label); | |
1462 | target = final_target; | |
1463 | } | |
1464 | ||
1465 | if (must_align) | |
091ad0b9 | 1466 | { |
5244db05 | 1467 | /* CEIL_DIV_EXPR needs to worry about the addition overflowing, |
0f41302f MS |
1468 | but we know it can't. So do the addition ourselves and then
1469 | use TRUNC_DIV_EXPR. */
0f56a403 | 1470 | target = expand_binop (Pmode, add_optab, target, |
3a42502d | 1471 | GEN_INT (required_align / BITS_PER_UNIT - 1), |
5244db05 RK |
1472 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1473 | target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target, | |
3a42502d | 1474 | GEN_INT (required_align / BITS_PER_UNIT), |
b1ec3c92 | 1475 | NULL_RTX, 1); |
091ad0b9 | 1476 | target = expand_mult (Pmode, target, |
3a42502d | 1477 | GEN_INT (required_align / BITS_PER_UNIT), |
b1ec3c92 | 1478 | NULL_RTX, 1); |
091ad0b9 | 1479 | } |
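/* A sketch of the add/divide/multiply sequence above, with a
   hypothetical align_bytes == required_align / BITS_PER_UNIT:

     uintptr_t round_up (uintptr_t x, uintptr_t align_bytes)
     {
       return (x + align_bytes - 1) / align_bytes * align_bytes;
     }

   e.g. round_up (100, 16) == 112.  Biasing by align_bytes - 1 and
   then truncating avoids the overflow handling that CEIL_DIV_EXPR
   would otherwise have to perform.  */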
d9b3eb63 | 1480 | |
3a42502d RH |
1481 | /* Now that we've committed to a return value, mark its alignment. */ |
1482 | mark_reg_pointer (target, required_align); | |
1483 | ||
15fc0026 | 1484 | /* Record the new stack level for nonlocal gotos. */ |
6de9cd9a DN |
1485 | if (cfun->nonlocal_goto_save_area != 0) |
1486 | update_nonlocal_goto_save_area (); | |
15fc0026 | 1487 | |
18ca7dab RK |
1488 | return target; |
1489 | } | |
1490 | \f | |
d9b3eb63 | 1491 | /* A front end may want to override GCC's stack checking by providing a |
14a774a9 RK |
1492 | run-time routine to call to check the stack, so provide a mechanism for |
1493 | calling that routine. */ | |
1494 | ||
e2500fed | 1495 | static GTY(()) rtx stack_check_libfunc; |
14a774a9 RK |
1496 | |
1497 | void | |
d477d1fe | 1498 | set_stack_check_libfunc (const char *libfunc_name) |
14a774a9 | 1499 | { |
d477d1fe SB |
1500 | gcc_assert (stack_check_libfunc == NULL_RTX); |
1501 | stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name); | |
14a774a9 RK |
1502 | } |
1503 | \f | |
edff2491 RK |
1504 | /* Emit one stack probe at ADDRESS, an address within the stack. */ |
1505 | ||
260c8ba3 | 1506 | void |
502b8322 | 1507 | emit_stack_probe (rtx address) |
edff2491 | 1508 | { |
7b84aac0 EB |
1509 | #ifdef HAVE_probe_stack_address |
1510 | if (HAVE_probe_stack_address) | |
1511 | emit_insn (gen_probe_stack_address (address)); | |
1512 | else | |
1513 | #endif | |
1514 | { | |
1515 | rtx memref = gen_rtx_MEM (word_mode, address); | |
edff2491 | 1516 | |
7b84aac0 | 1517 | MEM_VOLATILE_P (memref) = 1; |
edff2491 | 1518 | |
7b84aac0 | 1519 | /* See if we have an insn to probe the stack. */ |
d809253a | 1520 | #ifdef HAVE_probe_stack |
7b84aac0 EB |
1521 | if (HAVE_probe_stack) |
1522 | emit_insn (gen_probe_stack (memref)); | |
1523 | else | |
d809253a | 1524 | #endif |
7b84aac0 EB |
1525 | emit_move_insn (memref, const0_rtx); |
1526 | } | |
edff2491 RK |
1527 | } |
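/* In the fallback case the probe is just a volatile store of zero,
   i.e. roughly the C sketch below, where word_t stands for a
   machine-word-sized integer type:

     *(volatile word_t *) address = 0;

   Marking the MEM volatile keeps later passes from deleting what
   would otherwise look like a dead store.  */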
1528 | ||
d9b3eb63 | 1529 | /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive. |
d809253a EB |
1530 | FIRST is a constant and SIZE is a Pmode RTX. These are offsets from
1531 | the current stack pointer. STACK_GROWS_DOWNWARD says whether to add | |
1532 | or subtract them from the stack pointer. */ | |
1533 | ||
1534 | #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP) | |
edff2491 RK |
1535 | |
1536 | #ifdef STACK_GROWS_DOWNWARD | |
1537 | #define STACK_GROW_OP MINUS | |
d809253a EB |
1538 | #define STACK_GROW_OPTAB sub_optab |
1539 | #define STACK_GROW_OFF(off) -(off) | |
edff2491 RK |
1540 | #else |
1541 | #define STACK_GROW_OP PLUS | |
d809253a EB |
1542 | #define STACK_GROW_OPTAB add_optab |
1543 | #define STACK_GROW_OFF(off) (off) | |
edff2491 RK |
1544 | #endif |
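/* For instance, with STACK_CHECK_PROBE_INTERVAL_EXP == 12 (the usual
   default), PROBE_INTERVAL == 1 << 12 == 4096 bytes, i.e. one probe
   per 4K page.  */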
1545 | ||
1546 | void | |
502b8322 | 1547 | probe_stack_range (HOST_WIDE_INT first, rtx size) |
edff2491 | 1548 | { |
4b6c1672 RK |
1549 | /* First ensure SIZE is Pmode. */ |
1550 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1551 | size = convert_to_mode (Pmode, size, 1); | |
1552 | ||
d809253a EB |
1553 | /* Next see if we have a function to check the stack. */ |
1554 | if (stack_check_libfunc) | |
f5f5363f | 1555 | { |
d809253a | 1556 | rtx addr = memory_address (Pmode, |
2b3aadfc RH |
1557 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, |
1558 | stack_pointer_rtx, | |
0a81f074 RS |
1559 | plus_constant (Pmode, |
1560 | size, first))); | |
949fa04c EB |
1561 | emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr, |
1562 | Pmode); | |
f5f5363f | 1563 | } |
14a774a9 | 1564 | |
d809253a | 1565 | /* Next see if we have an insn to check the stack. */ |
edff2491 | 1566 | #ifdef HAVE_check_stack |
d6a6a07a | 1567 | else if (HAVE_check_stack) |
edff2491 | 1568 | { |
a5c7d693 | 1569 | struct expand_operand ops[1]; |
d809253a EB |
1570 | rtx addr = memory_address (Pmode, |
1571 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1572 | stack_pointer_rtx, | |
0a81f074 RS |
1573 | plus_constant (Pmode, |
1574 | size, first))); | |
d6a6a07a | 1575 | bool success; |
a5c7d693 | 1576 | create_input_operand (&ops[0], addr, Pmode); |
d6a6a07a EB |
1577 | success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops); |
1578 | gcc_assert (success); | |
edff2491 RK |
1579 | } |
1580 | #endif | |
1581 | ||
d809253a EB |
1582 | /* Otherwise we have to generate explicit probes. If we have a constant |
1583 | small number of them to generate, that's the easy case. */ | |
1584 | else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) | |
edff2491 | 1585 | { |
d809253a EB |
1586 | HOST_WIDE_INT isize = INTVAL (size), i; |
1587 | rtx addr; | |
1588 | ||
1589 | /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until | |
1590 | it exceeds SIZE. If only one probe is needed, this will not | |
1591 | generate any code. Then probe at FIRST + SIZE. */ | |
1592 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1593 | { | |
1594 | addr = memory_address (Pmode, | |
0a81f074 | 1595 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1596 | STACK_GROW_OFF (first + i))); |
1597 | emit_stack_probe (addr); | |
1598 | } | |
1599 | ||
1600 | addr = memory_address (Pmode, | |
0a81f074 | 1601 | plus_constant (Pmode, stack_pointer_rtx, |
d809253a EB |
1602 | STACK_GROW_OFF (first + isize))); |
1603 | emit_stack_probe (addr); | |
edff2491 RK |
1604 | } |
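/* For example, with PROBE_INTERVAL == 4096, FIRST == 0 and
   SIZE == 10000, the loop probes at offsets 4096 and 8192 and the
   final statement probes at offset 10000 (offsets being negated by
   STACK_GROW_OFF on a downward-growing stack).  */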
1605 | ||
d809253a EB |
1606 | /* In the variable case, do the same as above, but in a loop. Note that we |
1607 | must be extra careful with variables wrapping around because we might be | |
1608 | at the very top (or the very bottom) of the address space and we have to | |
1609 | be able to handle this case properly; in particular, we use an equality | |
1610 | test for the loop condition. */ | |
edff2491 RK |
1611 | else |
1612 | { | |
d809253a | 1613 | rtx rounded_size, rounded_size_op, test_addr, last_addr, temp; |
edff2491 | 1614 | rtx loop_lab = gen_label_rtx (); |
edff2491 | 1615 | rtx end_lab = gen_label_rtx (); |
edff2491 | 1616 | |
edff2491 | 1617 | |
d809253a EB |
1618 | /* Step 1: round SIZE to the previous multiple of the interval. */ |
1619 | ||
1620 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1621 | rounded_size | |
1622 | = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL)); | |
1623 | rounded_size_op = force_operand (rounded_size, NULL_RTX); | |
1624 | ||
1625 | ||
1626 | /* Step 2: compute initial and final value of the loop counter. */ | |
1627 | ||
1628 | /* TEST_ADDR = SP + FIRST. */ | |
1629 | test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1630 | stack_pointer_rtx, | |
1631 | GEN_INT (first)), NULL_RTX); | |
1632 | ||
1633 | /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */ | |
1634 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1635 | test_addr, | |
1636 | rounded_size_op), NULL_RTX); | |
1637 | ||
1638 | ||
1639 | /* Step 3: the loop | |
1640 | ||
1641 | while (TEST_ADDR != LAST_ADDR) | |
1642 | { | |
1643 | TEST_ADDR = TEST_ADDR + PROBE_INTERVAL | |
1644 | probe at TEST_ADDR | |
1645 | } | |
1646 | ||
1647 | probes at FIRST + N * PROBE_INTERVAL for values of N from 1 | |
1648 | until it is equal to ROUNDED_SIZE. */ | |
edff2491 RK |
1649 | |
1650 | emit_label (loop_lab); | |
edff2491 | 1651 | |
d809253a EB |
1652 | /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */ |
1653 | emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1, | |
1654 | end_lab); | |
1655 | ||
1656 | /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */ | |
1657 | temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr, | |
1658 | GEN_INT (PROBE_INTERVAL), test_addr, | |
edff2491 | 1659 | 1, OPTAB_WIDEN); |
edff2491 | 1660 | |
5b0264cb | 1661 | gcc_assert (temp == test_addr); |
edff2491 | 1662 | |
d809253a EB |
1663 | /* Probe at TEST_ADDR. */ |
1664 | emit_stack_probe (test_addr); | |
1665 | ||
1666 | emit_jump (loop_lab); | |
1667 | ||
edff2491 RK |
1668 | emit_label (end_lab); |
1669 | ||
d809253a EB |
1670 | |
1671 | /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time | |
1672 | that SIZE is equal to ROUNDED_SIZE. */ | |
1673 | ||
1674 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1675 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1676 | if (temp != const0_rtx) | |
1677 | { | |
1678 | rtx addr; | |
1679 | ||
32990d5b | 1680 | if (CONST_INT_P (temp)) |
d809253a EB |
1681 | { |
1682 | /* Use [base + disp] addressing mode if supported. */
1683 | HOST_WIDE_INT offset = INTVAL (temp); | |
1684 | addr = memory_address (Pmode, | |
0a81f074 | 1685 | plus_constant (Pmode, last_addr, |
d809253a EB |
1686 | STACK_GROW_OFF (offset))); |
1687 | } | |
1688 | else | |
1689 | { | |
1690 | /* Manual CSE if the difference is not known at compile-time. */ | |
1691 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1692 | addr = memory_address (Pmode, | |
1693 | gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1694 | last_addr, temp)); | |
1695 | } | |
1696 | ||
1697 | emit_stack_probe (addr); | |
1698 | } | |
edff2491 RK |
1699 | } |
1700 | } | |
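/* A self-contained C model of the variable-size case above, assuming
   a downward-growing stack and a hypothetical probe() helper (the
   names are illustrative only):

     void model_probe_stack_range (uintptr_t sp, uintptr_t first,
                                   uintptr_t size)
     {
       uintptr_t rounded_size = size & -(uintptr_t) PROBE_INTERVAL;
       uintptr_t test_addr = sp - first;
       uintptr_t last_addr = test_addr - rounded_size;

       while (test_addr != last_addr)
         {
           test_addr -= PROBE_INTERVAL;
           probe (test_addr);
         }

       if (size != rounded_size)
         probe (last_addr - (size - rounded_size));
     }

   Using an equality test rather than an ordered comparison is what
   keeps the loop correct even if the addresses wrap around the end
   of the address space.  */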
d809253a | 1701 | |
c35af30f EB |
1702 | /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes) |
1703 | while probing it. This pushes when SIZE is positive. SIZE need not | |
1704 | be constant. If ADJUST_BACK is true, adjust the stack pointer back
1705 | by SIZE at the end. */
d809253a | 1706 | |
c35af30f EB |
1707 | void |
1708 | anti_adjust_stack_and_probe (rtx size, bool adjust_back) | |
d809253a | 1709 | { |
c35af30f EB |
1710 | /* We skip the probe for the first interval + a small dope of 4 words and |
1711 | probe that many bytes past the specified size to maintain a protection | |
1712 | area at the bottom of the stack. */
d809253a EB |
1713 | const int dope = 4 * UNITS_PER_WORD; |
1714 | ||
1715 | /* First ensure SIZE is Pmode. */ | |
1716 | if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode) | |
1717 | size = convert_to_mode (Pmode, size, 1); | |
1718 | ||
1719 | /* If we have a constant small number of probes to generate, that's the | |
1720 | easy case. */ | |
32990d5b | 1721 | if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL) |
d809253a EB |
1722 | { |
1723 | HOST_WIDE_INT isize = INTVAL (size), i; | |
1724 | bool first_probe = true; | |
1725 | ||
260c8ba3 | 1726 | /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1727 | values of N from 1 until it exceeds SIZE. If only one probe is |
1728 | needed, this will not generate any code. Then adjust and probe | |
1729 | to PROBE_INTERVAL + SIZE. */ | |
1730 | for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL) | |
1731 | { | |
1732 | if (first_probe) | |
1733 | { | |
1734 | anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope)); | |
1735 | first_probe = false; | |
1736 | } | |
1737 | else | |
1738 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1739 | emit_stack_probe (stack_pointer_rtx); | |
1740 | } | |
1741 | ||
1742 | if (first_probe) | |
0a81f074 | 1743 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
d809253a | 1744 | else |
0a81f074 | 1745 | anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i)); |
d809253a EB |
1746 | emit_stack_probe (stack_pointer_rtx); |
1747 | } | |
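/* Worked example, assuming PROBE_INTERVAL == 4096, UNITS_PER_WORD
   == 8 (so dope == 32) and SIZE == 10000: the loop adjusts the stack
   by 2*4096 + 32 == 8224 and then by 4096, probing after each step,
   and the tail adjusts by 10000 + 4096 - 12288 == 1808 before the
   final probe.  The total is SIZE + PROBE_INTERVAL + dope == 14128
   bytes, matching the over-adjustment undone at the end of the
   function.  */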
1748 | ||
1749 | /* In the variable case, do the same as above, but in a loop. Note that we | |
1750 | must be extra careful with variables wrapping around because we might be | |
1751 | at the very top (or the very bottom) of the address space and we have to | |
1752 | be able to handle this case properly; in particular, we use an equality | |
1753 | test for the loop condition. */ | |
1754 | else | |
1755 | { | |
1756 | rtx rounded_size, rounded_size_op, last_addr, temp; | |
1757 | rtx loop_lab = gen_label_rtx (); | |
1758 | rtx end_lab = gen_label_rtx (); | |
1759 | ||
1760 | ||
1761 | /* Step 1: round SIZE to the previous multiple of the interval. */ | |
1762 | ||
1763 | /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */ | |
1764 | rounded_size | |
1765 | = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL)); | |
1766 | rounded_size_op = force_operand (rounded_size, NULL_RTX); | |
1767 | ||
1768 | ||
1769 | /* Step 2: compute initial and final value of the loop counter. */ | |
1770 | ||
1771 | /* SP = SP_0 + PROBE_INTERVAL. */ | |
1772 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
1773 | ||
1774 | /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */ | |
1775 | last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode, | |
1776 | stack_pointer_rtx, | |
1777 | rounded_size_op), NULL_RTX); | |
1778 | ||
1779 | ||
1780 | /* Step 3: the loop | |
1781 | ||
260c8ba3 EB |
1782 | while (SP != LAST_ADDR) |
1783 | { | |
1784 | SP = SP + PROBE_INTERVAL | |
1785 | probe at SP | |
1786 | } | |
d809253a | 1787 | |
260c8ba3 | 1788 | adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for |
d809253a EB |
1789 | values of N from 1 until it is equal to ROUNDED_SIZE. */ |
1790 | ||
1791 | emit_label (loop_lab); | |
1792 | ||
1793 | /* Jump to END_LAB if SP == LAST_ADDR. */ | |
1794 | emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX, | |
1795 | Pmode, 1, end_lab); | |
1796 | ||
1797 | /* SP = SP + PROBE_INTERVAL and probe at SP. */ | |
1798 | anti_adjust_stack (GEN_INT (PROBE_INTERVAL)); | |
1799 | emit_stack_probe (stack_pointer_rtx); | |
1800 | ||
1801 | emit_jump (loop_lab); | |
1802 | ||
1803 | emit_label (end_lab); | |
1804 | ||
1805 | ||
260c8ba3 | 1806 | /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot |
d809253a EB |
1807 | assert at compile-time that SIZE is equal to ROUNDED_SIZE. */ |
1808 | ||
1809 | /* TEMP = SIZE - ROUNDED_SIZE. */ | |
1810 | temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size); | |
1811 | if (temp != const0_rtx) | |
1812 | { | |
1813 | /* Manual CSE if the difference is not known at compile-time. */ | |
1814 | if (!CONST_INT_P (temp))
1815 | temp = gen_rtx_MINUS (Pmode, size, rounded_size_op); | |
1816 | anti_adjust_stack (temp); | |
1817 | emit_stack_probe (stack_pointer_rtx); | |
1818 | } | |
1819 | } | |
1820 | ||
c35af30f EB |
1821 | /* Adjust back and account for the additional first interval. */ |
1822 | if (adjust_back) | |
0a81f074 | 1823 | adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope)); |
c35af30f EB |
1824 | else |
1825 | adjust_stack (GEN_INT (PROBE_INTERVAL + dope)); | |
d809253a EB |
1826 | } |
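/* Net effect, under the same accounting: with ADJUST_BACK true the
   stack pointer ends up exactly where it started (the probes are the
   only side effect); with ADJUST_BACK false only the PROBE_INTERVAL
   + dope over-adjustment is released, leaving SIZE bytes allocated.  */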
1827 | ||
18ca7dab RK |
1828 | /* Return an rtx representing the register or memory location |
1829 | in which a scalar value of data type VALTYPE | |
1830 | was returned by a function call to function FUNC. | |
1d636cc6 RG |
1831 | FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise |
1832 | function is known, otherwise 0. | |
4dc07bd7 JJ |
1833 | OUTGOING is 1 if on a machine with register windows this function |
1834 | should return the register in which the function will put its result | |
30f7a378 | 1835 | and 0 otherwise. */ |
18ca7dab RK |
1836 | |
1837 | rtx | |
586de218 | 1838 | hard_function_value (const_tree valtype, const_tree func, const_tree fntype, |
502b8322 | 1839 | int outgoing ATTRIBUTE_UNUSED) |
18ca7dab | 1840 | { |
4dc07bd7 | 1841 | rtx val; |
770ae6cc | 1842 | |
1d636cc6 | 1843 | val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing); |
770ae6cc | 1844 | |
f8cfc6aa | 1845 | if (REG_P (val) |
e1a4071f JL |
1846 | && GET_MODE (val) == BLKmode) |
1847 | { | |
770ae6cc | 1848 | unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype); |
e1a4071f | 1849 | enum machine_mode tmpmode; |
770ae6cc | 1850 | |
d9b3eb63 | 1851 | /* int_size_in_bytes can return -1. We don't need a check here |
535a42b1 NS |
1852 | since the value of bytes will then be large enough that no |
1853 | mode will match anyway. */ | |
d9b3eb63 | 1854 | |
e1a4071f | 1855 | for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT); |
0fb7aeda KH |
1856 | tmpmode != VOIDmode; |
1857 | tmpmode = GET_MODE_WIDER_MODE (tmpmode)) | |
1858 | { | |
1859 | /* Have we found a large enough mode? */ | |
1860 | if (GET_MODE_SIZE (tmpmode) >= bytes) | |
1861 | break; | |
1862 | } | |
e1a4071f JL |
1863 | |
1864 | /* No suitable mode found. */ | |
5b0264cb | 1865 | gcc_assert (tmpmode != VOIDmode); |
e1a4071f JL |
1866 | |
1867 | PUT_MODE (val, tmpmode); | |
d9b3eb63 | 1868 | } |
e1a4071f | 1869 | return val; |
18ca7dab RK |
1870 | } |
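/* For example, on a typical target where int_size_in_bytes (valtype)
   == 3, the loop above skips QImode (1 byte) and HImode (2 bytes) and
   settles on SImode (4 bytes), the narrowest integer mode wide enough
   to hold the value.  */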
1871 | ||
1872 | /* Return an rtx representing the register or memory location | |
1873 | in which a scalar value of mode MODE was returned by a library call. */ | |
1874 | ||
1875 | rtx | |
390b17c2 | 1876 | hard_libcall_value (enum machine_mode mode, rtx fun) |
18ca7dab | 1877 | { |
390b17c2 | 1878 | return targetm.calls.libcall_value (mode, fun); |
18ca7dab | 1879 | } |
0c5e217d RS |
1880 | |
1881 | /* Look up the tree code for a given rtx code | |
1882 | to provide the arithmetic operation for REAL_ARITHMETIC. | |
1883 | The function returns an int because the caller may not know | |
1884 | what `enum tree_code' means. */ | |
1885 | ||
1886 | int | |
502b8322 | 1887 | rtx_to_tree_code (enum rtx_code code) |
0c5e217d RS |
1888 | { |
1889 | enum tree_code tcode; | |
1890 | ||
1891 | switch (code) | |
1892 | { | |
1893 | case PLUS: | |
1894 | tcode = PLUS_EXPR; | |
1895 | break; | |
1896 | case MINUS: | |
1897 | tcode = MINUS_EXPR; | |
1898 | break; | |
1899 | case MULT: | |
1900 | tcode = MULT_EXPR; | |
1901 | break; | |
1902 | case DIV: | |
1903 | tcode = RDIV_EXPR; | |
1904 | break; | |
1905 | case SMIN: | |
1906 | tcode = MIN_EXPR; | |
1907 | break; | |
1908 | case SMAX: | |
1909 | tcode = MAX_EXPR; | |
1910 | break; | |
1911 | default: | |
1912 | tcode = LAST_AND_UNUSED_TREE_CODE; | |
1913 | break; | |
1914 | } | |
1915 | return ((int) tcode); | |
1916 | } | |
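/* For example, rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while
   an rtx code with no counterpart here, such as SET, maps to
   LAST_AND_UNUSED_TREE_CODE.  */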
e2500fed GK |
1917 | |
1918 | #include "gt-explow.h" |