/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "hard-reg-set.h"
#include "function.h"
#include "hashtab.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode)
              || POINTER_BOUNDS_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
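
/* For illustration: with MODE == QImode (precision 8) and C == 0xff,
   SIGN is 0x80; the mask leaves c == 0xff, the xor gives 0x7f, and the
   subtraction yields 0x7f - 0x80 == -1, the sign-extended value of the
   8-bit pattern 0xff.  A nonnegative input such as 0x7f passes through
   all three steps unchanged.  */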

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
               bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
                                   mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
        inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
          if (term == const0_rtx)
            x = XEXP (x, 0);
          else if (inplace)
            XEXP (x, 1) = term;
          else
            x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
          c = 0;
        }
      else if (rtx *const_loc = find_constant_term_loc (&y))
        {
          if (!inplace)
            {
              /* We need to be careful since X may be shared and we can't
                 modify it in place.  */
              x = copy_rtx (x);
              const_loc = find_constant_term_loc (&x);
            }
          *const_loc = plus_constant (mode, *const_loc, c, true);
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
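
/* For illustration: plus_constant (Pmode, x, 4, false) folds the
   addition where it can.  A SYMBOL_REF input yields
   (const (plus (symbol_ref "x") (const_int 4))), and an input of
   (plus (reg R) (const_int 8)) yields (plus (reg R) (const_int 12))
   rather than a nested second PLUS.  */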

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
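
/* For illustration: with X == (plus (plus (reg R) (const_int 4))
   (const_int 8)) and *CONSTPTR == const0_rtx, the recursion returns
   (reg R) and leaves *CONSTPTR == (const_int 12); both constant terms
   are folded into *CONSTPTR and stripped from the sum.  */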

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  */

static rtx
convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
                                     bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space_1
                              (to_mode, XEXP (x, 0), as, true));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space_1
                                 (to_mode, XEXP (x, 0), as, in_const),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false);
}
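
/* For illustration: on a target where the pointer mode is SImode, the
   address mode is DImode and POINTERS_EXTEND_UNSIGNED is 1, converting
   (const_int -1) (the 32-bit pointer 0xffffffff) to DImode takes the
   CASE_CONST_SCALAR_INT arm with code == ZERO_EXTEND and produces
   (const_int 0xffffffff); a sign-extending target
   (POINTERS_EXTEND_UNSIGNED == 0) would produce (const_int -1)
   instead.  */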

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
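
/* For illustration (offsets assumed for the example): if X is
   (mem (const (plus (symbol_ref "a") (const_int 12)))), "a" sits 8
   bytes into its object block and the chosen anchor sits at the start
   of that block, the rewrite yields the equivalent of
   (mem (plus ANCHOR (const_int 20))), with the anchor forced into a
   register when a CSE pass is still expected so several accesses can
   share it.  */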

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
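
/* For illustration: for X == (const (plus (symbol_ref "s") (const_int 4)))
   with DECL_ALIGN (s) == 128, SA is 128; ctz_hwi (4) == 2, so CA ==
   2 * BITS_PER_UNIT (16 on a byte-addressable host) and the recorded
   pointer alignment is MIN (128, 16) == 16 bits.  A zero offset keeps
   the symbol's own alignment.  */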

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
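
/* For illustration: on a target whose PROMOTE_MODE rewrites QImode and
   HImode to SImode, as many RISC ports do, promote_mode for a 'char'
   variable returns SImode and leaves the signedness chosen by the
   macro in *PUNSIGNEDP, while promote_decl_mode sends PARM_DECLs and
   RESULT_DECLs through promote_function_mode so the target's
   calling-convention hook gets the final say.  */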

/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */
  anti_p = !anti_p;
#endif

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}
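
/* For illustration: on a STACK_GROWS_DOWNWARD target,
   anti_adjust_stack (GEN_INT (16)) emits sp := sp - 16 and increases
   stack_pointer_delta by 16, while adjust_stack (GEN_INT (16)) emits
   sp := sp + 16 and decreases it; the REG_ARGS_SIZE note recording the
   running delta is omitted while suppress_reg_args_size is set.  */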

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
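
/* For illustration: with a preferred stack boundary of 128 bits, ALIGN
   is 16 bytes, so a constant SIZE of 37 rounds to
   (37 + 15) / 16 * 16 == 48.  The non-constant path computes the same
   value with an add, a TRUNC_DIV_EXPR and a multiply, the prior
   addition of ALIGN - 1 making the truncating division act as a
   ceiling division.  */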

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
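
/* For illustration, the usual pairing is:

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area);
     ... push a variable-sized block ...
     emit_stack_restore (SAVE_BLOCK, save_area);

   The first call allocates the save slot itself (a pseudo register, or
   a stack slot for SAVE_NONLOCAL) and the second pops everything
   pushed in between.  */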

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx_insn *insn;
          rtx set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;
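
  /* For illustration: the LSB trick above extracts the largest power of
     two dividing a constant SIZE, e.g. INTVAL (size) == 24 gives
     lsb == 8 and hence size_align == 8 * BITS_PER_UNIT (64 on a
     byte-addressable host); the UINT_MAX guard only matters for huge
     power-of-two sizes whose bit count would overflow "unsigned".  */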
34831f3e RH |
1229 | /* We can't attempt to minimize alignment necessary, because we don't |
1230 | know the final value of preferred_stack_boundary yet while executing | |
1231 | this code. */ | |
1232 | if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY) | |
1233 | crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
1234 | ||
18ca7dab | 1235 | /* We will need to ensure that the address we return is aligned to |
34831f3e RH |
1236 | REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't |
1237 | always know its final value at this point in the compilation (it | |
1238 | might depend on the size of the outgoing parameter lists, for | |
1239 | example), so we must align the value to be returned in that case. | |
1240 | (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if | |
1241 | STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined). | |
1242 | We must also do an alignment operation on the returned value if | |
1243 | the stack pointer alignment is less strict than REQUIRED_ALIGN. | |
1244 | ||
1245 | If we have to align, we must leave space in SIZE for the hole | |
1246 | that might result from the alignment operation. */ | |
1247 | ||
1248 | must_align = (crtl->preferred_stack_boundary < required_align); | |
1249 | if (must_align) | |
d3c12306 | 1250 | { |
34831f3e RH |
1251 | if (required_align > PREFERRED_STACK_BOUNDARY) |
1252 | extra_align = PREFERRED_STACK_BOUNDARY; | |
1253 | else if (required_align > STACK_BOUNDARY) | |
1254 | extra_align = STACK_BOUNDARY; | |
1255 | else | |
1256 | extra_align = BITS_PER_UNIT; | |
1ecad98e EB |
1257 | } |
1258 | ||
34831f3e RH |
1259 | /* ??? STACK_POINTER_OFFSET is always defined now. */ |
1260 | #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) | |
1261 | must_align = true; | |
1262 | extra_align = BITS_PER_UNIT; | |
1263 | #endif | |
1ecad98e | 1264 | |
34831f3e RH |
1265 | if (must_align) |
1266 | { | |
1267 | unsigned extra = (required_align - extra_align) / BITS_PER_UNIT; | |
3a42502d | 1268 | |
0a81f074 | 1269 | size = plus_constant (Pmode, size, extra); |
3a42502d | 1270 | size = force_operand (size, NULL_RTX); |
d3c12306 | 1271 | |
a11e0df4 | 1272 | if (flag_stack_usage_info) |
3a42502d | 1273 | stack_usage_size += extra; |
34831f3e | 1274 | |
3a42502d RH |
1275 | if (extra && size_align > extra_align) |
1276 | size_align = extra_align; | |
d3c12306 | 1277 | } |
1d9d04f8 | 1278 | |
18ca7dab | 1279 | /* Round the size to a multiple of the required stack alignment. |
34831f3e | 1280 | Since the stack if presumed to be rounded before this allocation, |
18ca7dab RK |
1281 | this will maintain the required alignment. |
1282 | ||
1283 | If the stack grows downward, we could save an insn by subtracting | |
1284 | SIZE from the stack pointer and then aligning the stack pointer. | |
1285 | The problem with this is that the stack pointer may be unaligned | |
1286 | between the execution of the subtraction and alignment insns and | |
1287 | some machines do not allow this. Even on those that do, some | |
1288 | signal handlers malfunction if a signal should occur between those | |
1289 | insns. Since this is an extremely rare event, we have no reliable | |
1290 | way of knowing which systems have this problem. So we avoid even | |
1291 | momentarily mis-aligning the stack. */ | |
3a42502d | 1292 | if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0) |
d3c12306 EB |
1293 | { |
1294 | size = round_push (size); | |
18ca7dab | 1295 | |
a11e0df4 | 1296 | if (flag_stack_usage_info) |
d3c12306 | 1297 | { |
32990d5b | 1298 | int align = crtl->preferred_stack_boundary / BITS_PER_UNIT; |
d3c12306 EB |
1299 | stack_usage_size = (stack_usage_size + align - 1) / align * align; |
1300 | } | |
1301 | } | |
1302 | ||
3a42502d | 1303 | target = gen_reg_rtx (Pmode); |
7458026b | 1304 | |
d3c12306 EB |
1305 | /* The size is supposed to be fully adjusted at this point so record it |
1306 | if stack usage info is requested. */ | |
a11e0df4 | 1307 | if (flag_stack_usage_info) |
d3c12306 EB |
1308 | { |
1309 | current_function_dynamic_stack_size += stack_usage_size; | |
1310 | ||
1311 | /* ??? This is gross but the only safe stance in the absence | |
1312 | of stack usage oriented flow analysis. */ | |
1313 | if (!cannot_accumulate) | |
1314 | current_function_has_unbounded_dynamic_stack_size = 1; | |
1315 | } | |
18ca7dab | 1316 | |
528a80c1 | 1317 | final_label = NULL; |
7458026b ILT |
1318 | final_target = NULL_RTX; |
1319 | ||
1320 | /* If we are splitting the stack, we need to ask the backend whether | |
1321 | there is enough room on the current stack. If there isn't, or if | |
1322 | the backend doesn't know how to tell is, then we need to call a | |
1323 | function to allocate memory in some other way. This memory will | |
1324 | be released when we release the current stack segment. The | |
1325 | effect is that stack allocation becomes less efficient, but at | |
1326 | least it doesn't cause a stack overflow. */ | |
1327 | if (flag_split_stack) | |
1328 | { | |
528a80c1 DM |
1329 | rtx_code_label *available_label; |
1330 | rtx ask, space, func; | |
7458026b | 1331 | |
528a80c1 | 1332 | available_label = NULL; |
7458026b ILT |
1333 | |
1334 | #ifdef HAVE_split_stack_space_check | |
1335 | if (HAVE_split_stack_space_check) | |
1336 | { | |
1337 | available_label = gen_label_rtx (); | |
1338 | ||
1339 | /* This instruction will branch to AVAILABLE_LABEL if there | |
1340 | are SIZE bytes available on the stack. */ | |
1341 | emit_insn (gen_split_stack_space_check (size, available_label)); | |
1342 | } | |
1343 | #endif | |
1344 | ||
c3928dde | 1345 | /* The __morestack_allocate_stack_space function will allocate |
c070a3b9 ILT |
1346 | memory using malloc. If the alignment of the memory returned |
1347 | by malloc does not meet REQUIRED_ALIGN, we increase SIZE to | |
1348 | make sure we allocate enough space. */ | |
1349 | if (MALLOC_ABI_ALIGNMENT >= required_align) | |
1350 | ask = size; | |
1351 | else | |
1352 | { | |
1353 | ask = expand_binop (Pmode, add_optab, size, | |
2f1cd2eb RS |
1354 | gen_int_mode (required_align / BITS_PER_UNIT - 1, |
1355 | Pmode), | |
c070a3b9 ILT |
1356 | NULL_RTX, 1, OPTAB_LIB_WIDEN); |
1357 | must_align = true; | |
1358 | } | |
c3928dde | 1359 | |
7458026b ILT |
1360 | func = init_one_libfunc ("__morestack_allocate_stack_space"); |
1361 | ||
1362 | space = emit_library_call_value (func, target, LCT_NORMAL, Pmode, | |
c3928dde | 1363 | 1, ask, Pmode); |
7458026b ILT |
1364 | |
1365 | if (available_label == NULL_RTX) | |
1366 | return space; | |
1367 | ||
1368 | final_target = gen_reg_rtx (Pmode); | |
7458026b ILT |
1369 | |
1370 | emit_move_insn (final_target, space); | |
1371 | ||
1372 | final_label = gen_label_rtx (); | |
1373 | emit_jump (final_label); | |
1374 | ||
1375 | emit_label (available_label); | |
1376 | } | |
1377 | ||
18ca7dab RK |
1378 | do_pending_stack_adjust (); |
1379 | ||
1503a7ec | 1380 | /* We ought to be called always on the toplevel and stack ought to be aligned |
a1f300c0 | 1381 | properly. */ |
5b0264cb NS |
1382 | gcc_assert (!(stack_pointer_delta |
1383 | % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))); | |
1503a7ec | 1384 | |
  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);
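
  /* Descriptive note on the dispatch above (a summary, not additional
     semantics): in the generic case nothing has been probed by a
     prologue, so the whole range a maximal frame plus the protection
     area could occupy must be covered here; with the static builtin
     scheme the prologue has already probed the frame, so only the
     protection area needs to be covered.  */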

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx_code_label *space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }
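
      /* Worked example (illustrative, downward-growing stack): if SP is
         0x7fff0000 and stack_limit_rtx holds 0x7ffe0000, AVAILABLE is
         0x10000 bytes; the unsigned GEU compare branches to
         SPACE_AVAILABLE when the request fits, otherwise we fall
         through to the trap (or to the error above on targets without
         a trap insn).  */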

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if SIZE is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                           Pmode),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              gen_int_mode (required_align / BITS_PER_UNIT,
                                            Pmode),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            gen_int_mode (required_align / BITS_PER_UNIT,
                                          Pmode),
                            NULL_RTX, 1);
    }
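
  /* E.g. (illustrative): with REQUIRED_ALIGN / BITS_PER_UNIT == 16,
     this computes target = ((target + 15) / 16) * 16, i.e. it rounds
     the pointer up to the next 16-byte boundary using only add,
     truncating divide and multiply.  */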

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
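
/* Usage sketch (assumption: the Ada front end is one such caller and
   registers its routine roughly like this):

     set_stack_check_libfunc ("_gnat_stack_check");

   Once set, probe_stack_range below emits a call to that symbol
   instead of generating explicit probes.  */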
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));
  else
#endif
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
        emit_insn (gen_probe_stack (memref));
      else
#endif
        emit_move_insn (memref, const0_rtx);
    }
}
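
/* Descriptive note on the fallback above: the store of zero goes
   through a MEM marked volatile, so it cannot be deleted as dead even
   though nothing reads the result; the two insn patterns let a target
   probe without clobbering the probed word.  */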

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
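
/* So on a downward-growing stack, "probing at offset X" means touching
   the word at SP - X; these three macros let the code below be written
   once for both stack directions.  (Descriptive note on the macros
   above.)  */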

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
                         Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 plus_constant (Pmode,
                                                                size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          addr = memory_address (Pmode,
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);
        }

      addr = memory_address (Pmode,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }
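
  /* Worked example (assuming PROBE_INTERVAL is 4096): FIRST == 8192 and
     SIZE == 10000 probe at offsets 8192 + 4096 and 8192 + 8192 in the
     loop, then at 8192 + 10000 after it -- consecutive probes are never
     more than one interval apart and the last byte of the range is
     covered exactly.  */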

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 gen_int_mode (first, Pmode)),
                                 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */
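
      /* E.g. (assuming PROBE_INTERVAL is 4096): -4096 in two's
         complement is ...1111000000000000, so SIZE & -4096 simply
         clears the low 12 bits; a run-time SIZE of 10000 yields
         ROUNDED_SIZE 8192, the loop runs twice, and step 4 below covers
         the remaining 1808 bytes.  */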

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
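
  /* Worked example (assuming PROBE_INTERVAL is 4096 and dope is 32):
     for SIZE == 10000 the loop first adjusts SP by 2*4096 + 32 and
     probes, then adjusts by 4096 and probes; the tail adjusts by
     10000 + 4096 - 12288 = 1808 and probes once more.  The total
     adjustment is SIZE + PROBE_INTERVAL + dope, which is exactly what
     the ADJUST_BACK code at the bottom of this function undoes.  */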

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (!CONST_INT_P (temp))
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
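
/* E.g. (illustrative): a 6-byte struct returned in a register comes
   back from the target hook as a BLKmode REG; the loop above walks the
   integer modes from the narrowest upward and settles on the first one
   of at least 6 bytes -- DImode on typical 64-bit targets.  */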

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
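
/* Usage sketch (illustrative): a caller mapping an rtx addition onto
   real arithmetic would write

     enum tree_code tc = (enum tree_code) rtx_to_tree_code (PLUS);

   and must treat LAST_AND_UNUSED_TREE_CODE as "no mapping", since any
   rtx code outside the six handled above falls into the default.  */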

#include "gt-explow.h"