/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

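/* Illustrative sketch (not part of this file): a machine-generated
   routine in insn-emit.c typically builds its pattern with calls such
   as the ones below, where gen_rtx_fmt_ee creates a two-operand rtx of
   the given code and mode.  The name gen_hypothetical_addsi3 and the
   operands DEST, SRC1 and SRC2 are made-up placeholders.

     rtx
     gen_hypothetical_addsi3 (rtx dest, rtx src1, rtx src2)
     {
       return gen_rtx_SET (VOIDmode, dest,
                           gen_rtx_fmt_ee (PLUS, SImode, src1, src2));
     }
*/
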
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "varasm.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "builtins.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not
   able to deal with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

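/* Illustrative sketch (not part of this file): because small integer
   constants are shared, pointer equality is value equality for them.
   The assertion below is for exposition only.

     rtx a = GEN_INT (2);
     rtx b = gen_rtx_CONST_INT (VOIDmode, 2);
     gcc_assert (a == b && a == const2_rtx);
*/
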
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
\f
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure for decl DECL and offset OFFSET,
   and insert it into the hash table if one identical to it is not
   already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

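/* Illustrative sketch (not part of this file): identical (decl, offset)
   pairs share a single reg_attrs record, and the all-default pair maps
   to a null pointer.  SOME_DECL is a made-up placeholder.

     reg_attrs *a = get_reg_attrs (some_decl, 4);
     reg_attrs *b = get_reg_attrs (some_decl, 4);
     gcc_assert (a == b);
     gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);
*/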

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn and to block register equivalences from being seen
   across it.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

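/* Illustrative sketch (not part of this file): gen_int_mode truncates
   and sign-extends C to fit MODE before creating the CONST_INT, so an
   out-of-range value wraps instead of producing an invalid constant:

     rtx x = gen_int_mode (0xff, QImode);   here x is (const_int -1)
     rtx y = gen_int_mode (5, QImode);      here y is (const_int 5)
*/
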
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Construct a double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

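/* Illustrative sketch (not part of this file): a value that fits in one
   HOST_WIDE_INT comes back as a (possibly shared) CONST_INT; only
   genuinely wide values get a CONST_WIDE_INT or CONST_DOUBLE.

     rtx x = immed_wide_int_const (wi::shwi (42, GET_MODE_PRECISION (SImode)),
                                   SImode);
     here x is (const_int 42)
*/
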
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

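/* Illustrative sketch (not part of this file): the well-known pointer
   registers are shared, so repeated Pmode requests return the same rtx
   object instead of fresh copies.

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     before reload this is exactly frame_pointer_rtx
*/
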
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be less than that
                of the integer mode.  LRA also uses subregs for a register
                that should be used in a different mode in an insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}

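/* Illustrative sketch (not part of this file), assuming a 32-bit target
   with word_mode == SImode and a pseudo register REG:

     validate_subreg (SImode, DImode, reg, 0)   true, aligned lowpart
     validate_subreg (SImode, DImode, reg, 4)   true, aligned subword
     validate_subreg (SImode, DImode, reg, 2)   false, misaligned offset
     validate_subreg (HImode, DFmode, reg, 0)   false outside of LRA,
                                                since float-mode subregs
                                                cannot change size
*/
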
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
\f

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
\f
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
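
/* Illustrative sketch (not part of this file): for SImode inside DImode,
   byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target
   and 4 on a big-endian one; the paradoxical direction,
   byte_lowpart_offset (DImode, SImode), is the negated value.  */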
\f
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

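/* Illustrative sketch (not part of this file): requesting a complex-mode
   pseudo while generating_concat_p is set yields a CONCAT of two
   independent pseudos, one per component, e.g. for SCmode:

     rtx c = gen_reg_rtx (SCmode);
     here c is (concat:SC (reg:SF i) (reg:SF j)) for fresh pseudos i, j
*/
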
/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
\f
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
\f
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

80c70e76 | 1395 | /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */ |
10ef59ac | 1396 | |
81802af6 | 1397 | unsigned int |
35cb5232 | 1398 | subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode) |
81802af6 | 1399 | { |
1400 | unsigned int offset = 0; | |
1401 | int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); | |
10ef59ac | 1402 | |
81802af6 | 1403 | if (difference > 0) |
d56d0ca2 | 1404 | { |
81802af6 | 1405 | if (WORDS_BIG_ENDIAN) |
1406 | offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; | |
1407 | if (BYTES_BIG_ENDIAN) | |
1408 | offset += difference % UNITS_PER_WORD; | |
d56d0ca2 | 1409 | } |
701e46d0 | 1410 | |
81802af6 | 1411 | return offset; |
d56d0ca2 | 1412 | } |
64ab453f | 1413 | |
81802af6 | 1414 | /* Return the offset in bytes needed to get the OUTERMODE high part 
1415 | of a value in mode INNERMODE stored in memory in target format. */ | 
1416 | unsigned int | |
35cb5232 | 1417 | subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode) |
64ab453f | 1418 | { |
1419 | unsigned int offset = 0; | |
1420 | int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode)); | |
1421 | ||
611234b4 | 1422 | gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode)); |
81802af6 | 1423 | |
64ab453f | 1424 | if (difference > 0) |
1425 | { | |
81802af6 | 1426 | if (! WORDS_BIG_ENDIAN) |
64ab453f | 1427 | offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD; |
81802af6 | 1428 | if (! BYTES_BIG_ENDIAN) |
64ab453f | 1429 | offset += difference % UNITS_PER_WORD; |
1430 | } | |
1431 | ||
81802af6 | 1432 | return offset; |
64ab453f | 1433 | } |
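
/* Editor's note: an illustrative, self-contained sketch -- not part of
   emit-rtl.c -- of the offset arithmetic used by subreg_lowpart_offset
   and subreg_highpart_offset above.  It assumes a 4-byte word and models
   the WORDS_BIG_ENDIAN / BYTES_BIG_ENDIAN macros as plain booleans.  */

#include <stdio.h>
#include <stdbool.h>

#define WORD_BYTES 4  /* stands in for UNITS_PER_WORD */

static unsigned int
lowpart_offset (int inner_bytes, int outer_bytes,
                bool words_big_endian, bool bytes_big_endian)
{
  unsigned int offset = 0;
  int difference = inner_bytes - outer_bytes;

  if (difference > 0)
    {
      /* Skip whole words first, then bytes within the last word.  */
      if (words_big_endian)
        offset += (difference / WORD_BYTES) * WORD_BYTES;
      if (bytes_big_endian)
        offset += difference % WORD_BYTES;
    }
  return offset;
}

int
main (void)
{
  /* Taking the 4-byte low part of an 8-byte value: byte offset 0 on a
     little-endian target, 4 on a big-endian one.  The highpart offset is
     the same computation with the endianness tests inverted.  */
  printf ("LE lowpart: %u\n", lowpart_offset (8, 4, false, false)); /* 0 */
  printf ("BE lowpart: %u\n", lowpart_offset (8, 4, true, true));   /* 4 */
  return 0;
}
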
d56d0ca2 | 1434 | |
15bbde2b | 1435 | /* Return 1 iff X, assumed to be a SUBREG, |
1436 | refers to the least significant part of its containing reg. | |
1437 | If X is not a SUBREG, always return 1 (it is its own low part!). */ | |
1438 | ||
1439 | int | |
b7bf20db | 1440 | subreg_lowpart_p (const_rtx x) |
15bbde2b | 1441 | { |
1442 | if (GET_CODE (x) != SUBREG) | |
1443 | return 1; | |
7e14c1bf | 1444 | else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) |
1445 | return 0; | |
15bbde2b | 1446 | |
81802af6 | 1447 | return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x))) |
1448 | == SUBREG_BYTE (x)); | |
15bbde2b | 1449 | } |
b537bfdb | 1450 | |
1451 | /* Return true if X is a paradoxical subreg, false otherwise. */ | |
1452 | bool | |
1453 | paradoxical_subreg_p (const_rtx x) | |
1454 | { | |
1455 | if (GET_CODE (x) != SUBREG) | |
1456 | return false; | |
1457 | return (GET_MODE_PRECISION (GET_MODE (x)) | |
1458 | > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))); | |
1459 | } | |
15bbde2b | 1460 | \f |
701e46d0 | 1461 | /* Return subword OFFSET of operand OP. |
1462 | The word number, OFFSET, is interpreted as the word number starting | |
1463 | at the low-order address. OFFSET 0 is the low-order word if not | |
1464 | WORDS_BIG_ENDIAN, otherwise it is the high-order word. | |
1465 | ||
1466 | If we cannot extract the required word, we return zero. Otherwise, | |
1467 | an rtx corresponding to the requested word will be returned. | |
1468 | ||
1469 | VALIDATE_ADDRESS is nonzero if the address should be validated. Before | |
1470 | reload has completed, a valid address will always be returned. After | |
1471 | reload, if a valid address cannot be returned, we return zero. | |
1472 | ||
1473 | If VALIDATE_ADDRESS is zero, we simply form the required address; validating | |
1474 | it is the responsibility of the caller. | |
1475 | ||
1476 | MODE is the mode of OP in case it is a CONST_INT. | |
1477 | ||
1478 | ??? This is still rather broken for some cases. The problem at | 
1479 | the moment is that no caller of this function provides a 'goal mode' | 
1480 | for us to work with. This exists because all callers were written | 
84e81e84 | 1481 | in a word-based SUBREG world. 
1482 | Most uses of this function can now be replaced by | 
1483 | simplify_subreg. | 
1484 | */ | 
701e46d0 | 1485 | |
1486 | rtx | |
35cb5232 | 1487 | operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode) |
701e46d0 | 1488 | { |
1489 | if (mode == VOIDmode) | |
1490 | mode = GET_MODE (op); | |
1491 | ||
611234b4 | 1492 | gcc_assert (mode != VOIDmode); |
701e46d0 | 1493 | |
6312a35e | 1494 | /* If OP is narrower than a word, fail. */ |
701e46d0 | 1495 | if (mode != BLKmode |
1496 | && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)) | |
1497 | return 0; | |
1498 | ||
6312a35e | 1499 | /* If we want a word outside OP, return zero. */ |
701e46d0 | 1500 | if (mode != BLKmode |
1501 | && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode)) | |
1502 | return const0_rtx; | |
1503 | ||
701e46d0 | 1504 | /* Form a new MEM at the requested address. */ |
e16ceb8e | 1505 | if (MEM_P (op)) |
701e46d0 | 1506 | { |
9ce37fa7 | 1507 | rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD); |
701e46d0 | 1508 | |
e4e86ec5 | 1509 | if (! validate_address) |
9ce37fa7 | 1510 | return new_rtx; |
e4e86ec5 | 1511 | |
1512 | else if (reload_completed) | |
701e46d0 | 1513 | { |
bd1a81f7 | 1514 | if (! strict_memory_address_addr_space_p (word_mode, |
1515 | XEXP (new_rtx, 0), | |
1516 | MEM_ADDR_SPACE (op))) | |
e4e86ec5 | 1517 | return 0; |
701e46d0 | 1518 | } |
e4e86ec5 | 1519 | else |
9ce37fa7 | 1520 | return replace_equiv_address (new_rtx, XEXP (new_rtx, 0)); |
701e46d0 | 1521 | } |
1522 | ||
84e81e84 | 1523 | /* Rest can be handled by simplify_subreg. */ |
1524 | return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD)); | |
701e46d0 | 1525 | } |
1526 | ||
89f18f73 | 1527 | /* Similar to `operand_subword', but never return 0. If we can't |
1528 | extract the required subword, put OP into a register and try again. | |
1529 | The second attempt must succeed. We always validate the address in | |
1530 | this case. | |
15bbde2b | 1531 | |
1532 | MODE is the mode of OP, in case it is CONST_INT. */ | |
1533 | ||
1534 | rtx | |
35cb5232 | 1535 | operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode) |
15bbde2b | 1536 | { |
701e46d0 | 1537 | rtx result = operand_subword (op, offset, 1, mode); |
15bbde2b | 1538 | |
1539 | if (result) | |
1540 | return result; | |
1541 | ||
1542 | if (mode != BLKmode && mode != VOIDmode) | |
ac825d29 | 1543 | { |
1544 | /* If this is a register which cannot be accessed by words, copy it | 
1545 | to a pseudo register. */ | |
8ad4c111 | 1546 | if (REG_P (op)) |
ac825d29 | 1547 | op = copy_to_reg (op); |
1548 | else | |
1549 | op = force_reg (mode, op); | |
1550 | } | |
15bbde2b | 1551 | |
701e46d0 | 1552 | result = operand_subword (op, offset, 1, mode); |
611234b4 | 1553 | gcc_assert (result); |
15bbde2b | 1554 | |
1555 | return result; | |
1556 | } | |
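
/* Editor's note: a small standalone sketch -- not GCC code -- of the word
   bookkeeping in operand_subword above.  Word OFFSET of a value lives at
   byte offset OFFSET * UNITS_PER_WORD, and the request is out of range
   when that word would end past the value (the const0_rtx case).
   Assumes 4-byte words.  */

#include <stdio.h>
#include <stdbool.h>

#define WORD_BYTES 4  /* stands in for UNITS_PER_WORD */

static bool
subword_in_range (unsigned int offset, unsigned int value_bytes)
{
  return (offset + 1) * WORD_BYTES <= value_bytes;
}

int
main (void)
{
  /* An 8-byte value has words 0 and 1; word 2 is out of range.  */
  printf ("word 1 in range: %d, at byte offset %u\n",
          subword_in_range (1, 8), 1 * WORD_BYTES);   /* 1, at byte 4 */
  printf ("word 2 in range: %d\n", subword_in_range (2, 8)); /* 0 */
  return 0;
}
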
1557 | \f | |
b3ff8d90 | 1558 | /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal, 
1559 | and 0 otherwise. */ | 
1560 | ||
1561 | int | |
52d07779 | 1562 | mem_expr_equal_p (const_tree expr1, const_tree expr2) |
b3ff8d90 | 1563 | { |
1564 | if (expr1 == expr2) | |
1565 | return 1; | |
1566 | ||
1567 | if (! expr1 || ! expr2) | |
1568 | return 0; | |
1569 | ||
1570 | if (TREE_CODE (expr1) != TREE_CODE (expr2)) | |
1571 | return 0; | |
1572 | ||
3a443843 | 1573 | return operand_equal_p (expr1, expr2, 0); |
b3ff8d90 | 1574 | } |
1575 | ||
ad0a178f | 1576 | /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN |
1577 | bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or | |
1578 | -1 if not known. */ | |
1579 | ||
1580 | int | |
7cfdc2f0 | 1581 | get_mem_align_offset (rtx mem, unsigned int align) |
ad0a178f | 1582 | { |
1583 | tree expr; | |
1584 | unsigned HOST_WIDE_INT offset; | |
1585 | ||
1586 | /* This function can't use | |
da443c27 | 1587 | if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) |
98ab9e8f | 1588 | || (MAX (MEM_ALIGN (mem), |
957d0361 | 1589 | MAX (align, get_object_alignment (MEM_EXPR (mem)))) |
ad0a178f | 1590 | < align)) |
1591 | return -1; | |
1592 | else | |
da443c27 | 1593 | return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1); |
ad0a178f | 1594 | for two reasons: |
1595 | - COMPONENT_REFs in MEM_EXPR can have NULL first operand, | |
1596 | for <variable>. get_inner_reference doesn't handle it and | |
1597 | even if it did, the alignment in that case needs to be determined | |
1598 | from DECL_FIELD_CONTEXT's TYPE_ALIGN. | |
1599 | - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR | |
1600 | isn't sufficiently aligned, the object it is in might be. */ | |
1601 | gcc_assert (MEM_P (mem)); | |
1602 | expr = MEM_EXPR (mem); | |
da443c27 | 1603 | if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem)) |
ad0a178f | 1604 | return -1; |
1605 | ||
da443c27 | 1606 | offset = MEM_OFFSET (mem); |
ad0a178f | 1607 | if (DECL_P (expr)) |
1608 | { | |
1609 | if (DECL_ALIGN (expr) < align) | |
1610 | return -1; | |
1611 | } | |
1612 | else if (INDIRECT_REF_P (expr)) | |
1613 | { | |
1614 | if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align) | |
1615 | return -1; | |
1616 | } | |
1617 | else if (TREE_CODE (expr) == COMPONENT_REF) | |
1618 | { | |
1619 | while (1) | |
1620 | { | |
1621 | tree inner = TREE_OPERAND (expr, 0); | |
1622 | tree field = TREE_OPERAND (expr, 1); | |
1623 | tree byte_offset = component_ref_field_offset (expr); | |
1624 | tree bit_offset = DECL_FIELD_BIT_OFFSET (field); | |
1625 | ||
1626 | if (!byte_offset | |
e913b5cd | 1627 | || !tree_fits_uhwi_p (byte_offset) |
1628 | || !tree_fits_uhwi_p (bit_offset)) | |
ad0a178f | 1629 | return -1; |
1630 | ||
e913b5cd | 1631 | offset += tree_to_uhwi (byte_offset); |
1632 | offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; | |
ad0a178f | 1633 | |
1634 | if (inner == NULL_TREE) | |
1635 | { | |
1636 | if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field)) | |
1637 | < (unsigned int) align) | |
1638 | return -1; | |
1639 | break; | |
1640 | } | |
1641 | else if (DECL_P (inner)) | |
1642 | { | |
1643 | if (DECL_ALIGN (inner) < align) | |
1644 | return -1; | |
1645 | break; | |
1646 | } | |
1647 | else if (TREE_CODE (inner) != COMPONENT_REF) | |
1648 | return -1; | |
1649 | expr = inner; | |
1650 | } | |
1651 | } | |
1652 | else | |
1653 | return -1; | |
1654 | ||
1655 | return offset & ((align / BITS_PER_UNIT) - 1); | |
1656 | } | |
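
/* Editor's note: illustrative sketch -- not GCC code -- of the final
   computation in get_mem_align_offset above.  Once the base object is
   known to be ALIGN bits aligned, a field at byte OFFSET inside it is
   misaligned by OFFSET modulo the alignment in bytes.  Assumes
   BITS_PER_UNIT == 8.  */

#include <stdio.h>

static int
misalignment (unsigned long offset, unsigned int align_bits)
{
  /* align_bits / 8 is a power of two, so the AND is a cheap modulo.  */
  return offset & ((align_bits / 8) - 1);
}

int
main (void)
{
  /* A field at byte 6 of a 32-bit-aligned object sits 2 bytes past a
     4-byte boundary.  */
  printf ("%d\n", misalignment (6, 32)); /* 2 */
  return 0;
}
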
1657 | ||
310b57a1 | 1658 | /* Given REF (a MEM) and T, either the type of REF or the expression | 
c6259b83 | 1659 | corresponding to REF, set the memory attributes. OBJECTP is nonzero |
6f717f77 | 1660 | if we are making a new object of this type. BITPOS is nonzero if |
1661 | there is an offset outstanding on T that will be applied later. */ | |
c6259b83 | 1662 | |
1663 | void | |
35cb5232 | 1664 | set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, |
1665 | HOST_WIDE_INT bitpos) | |
c6259b83 | 1666 | { |
6f717f77 | 1667 | HOST_WIDE_INT apply_bitpos = 0; |
c6259b83 | 1668 | tree type; |
d72886b5 | 1669 | struct mem_attrs attrs, *defattrs, *refattrs; |
3f06bd1b | 1670 | addr_space_t as; |
c6259b83 | 1671 | |
1672 | /* It can happen that type_for_mode was given a mode for which there | |
1673 | is no language-level type. In which case it returns NULL, which | |
1674 | we can see here. */ | |
1675 | if (t == NULL_TREE) | |
1676 | return; | |
1677 | ||
1678 | type = TYPE_P (t) ? t : TREE_TYPE (t); | |
4ccffa39 | 1679 | if (type == error_mark_node) |
1680 | return; | |
c6259b83 | 1681 | |
c6259b83 | 1682 | /* If we have already set DECL_RTL = ref, get_alias_set will get the |
1683 | wrong answer, as it assumes that DECL_RTL already has the right alias | |
1684 | info. Callers should not set DECL_RTL until after the call to | |
1685 | set_mem_attributes. */ | |
611234b4 | 1686 | gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); |
c6259b83 | 1687 | |
d72886b5 | 1688 | memset (&attrs, 0, sizeof (attrs)); |
1689 | ||
96216d37 | 1690 | /* Get the alias set from the expression or type (perhaps using a |
2a631e19 | 1691 | front-end routine) and use it. */ |
d72886b5 | 1692 | attrs.alias = get_alias_set (t); |
c6259b83 | 1693 | |
fbc6244b | 1694 | MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type); |
8d350e69 | 1695 | MEM_POINTER (ref) = POINTER_TYPE_P (type); |
c6259b83 | 1696 | |
d8dccfe9 | 1697 | /* Default values from pre-existing memory attributes if present. */ |
d72886b5 | 1698 | refattrs = MEM_ATTRS (ref); |
1699 | if (refattrs) | |
d8dccfe9 | 1700 | { |
1701 | /* ??? Can this ever happen? Calling this routine on a MEM that | |
1702 | already carries memory attributes should probably be invalid. */ | |
d72886b5 | 1703 | attrs.expr = refattrs->expr; |
6d58bcba | 1704 | attrs.offset_known_p = refattrs->offset_known_p; |
d72886b5 | 1705 | attrs.offset = refattrs->offset; |
6d58bcba | 1706 | attrs.size_known_p = refattrs->size_known_p; |
d72886b5 | 1707 | attrs.size = refattrs->size; |
1708 | attrs.align = refattrs->align; | |
d8dccfe9 | 1709 | } |
1710 | ||
1711 | /* Otherwise, default values from the mode of the MEM reference. */ | |
d72886b5 | 1712 | else |
d8dccfe9 | 1713 | { |
d72886b5 | 1714 | defattrs = mode_mem_attrs[(int) GET_MODE (ref)]; |
1715 | gcc_assert (!defattrs->expr); | |
6d58bcba | 1716 | gcc_assert (!defattrs->offset_known_p); |
d72886b5 | 1717 | |
d8dccfe9 | 1718 | /* Respect mode size. */ |
6d58bcba | 1719 | attrs.size_known_p = defattrs->size_known_p; |
d72886b5 | 1720 | attrs.size = defattrs->size; |
d8dccfe9 | 1721 | /* ??? Is this really necessary? We probably should always get |
1722 | the size from the type below. */ | |
1723 | ||
1724 | /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type; | |
1725 | if T is an object, always compute the object alignment below. */ | |
d72886b5 | 1726 | if (TYPE_P (t)) |
1727 | attrs.align = defattrs->align; | |
1728 | else | |
1729 | attrs.align = BITS_PER_UNIT; | |
d8dccfe9 | 1730 | /* ??? If T is a type, respecting mode alignment may *also* be wrong |
1731 | e.g. if the type carries an alignment attribute. Should we be | |
1732 | able to simply always use TYPE_ALIGN? */ | |
1733 | } | |
1734 | ||
a9d9ab08 | 1735 | /* We can set the alignment from the type if we are making an object, |
1736 | this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */ | |
679e0056 | 1737 | if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type)) |
d72886b5 | 1738 | attrs.align = MAX (attrs.align, TYPE_ALIGN (type)); |
679e0056 | 1739 | |
96216d37 | 1740 | /* If the size is known, we can set that. */ |
50ba3acc | 1741 | tree new_size = TYPE_SIZE_UNIT (type); |
96216d37 | 1742 | |
9eec20bf | 1743 | /* The address-space is that of the type. */ |
1744 | as = TYPE_ADDR_SPACE (type); | |
1745 | ||
579bccf9 | 1746 | /* If T is not a type, we may be able to deduce some more information about |
1747 | the expression. */ | |
1748 | if (! TYPE_P (t)) | |
2a631e19 | 1749 | { |
ae2dd339 | 1750 | tree base; |
b04fab2a | 1751 | |
2a631e19 | 1752 | if (TREE_THIS_VOLATILE (t)) |
1753 | MEM_VOLATILE_P (ref) = 1; | |
c6259b83 | 1754 | |
3c00f11c | 1755 | /* Now remove any conversions: they don't change what the underlying |
1756 | object is. Likewise for SAVE_EXPR. */ | |
72dd6141 | 1757 | while (CONVERT_EXPR_P (t) |
3c00f11c | 1758 | || TREE_CODE (t) == VIEW_CONVERT_EXPR |
1759 | || TREE_CODE (t) == SAVE_EXPR) | |
2a631e19 | 1760 | t = TREE_OPERAND (t, 0); |
1761 | ||
73eb0a09 | 1762 | /* Note whether this expression can trap. */ |
1763 | MEM_NOTRAP_P (ref) = !tree_could_trap_p (t); | |
1764 | ||
1765 | base = get_base_address (t); | |
3f06bd1b | 1766 | if (base) |
1767 | { | |
1768 | if (DECL_P (base) | |
1769 | && TREE_READONLY (base) | |
1770 | && (TREE_STATIC (base) || DECL_EXTERNAL (base)) | |
1771 | && !TREE_THIS_VOLATILE (base)) | |
1772 | MEM_READONLY_P (ref) = 1; | |
1773 | ||
1774 | /* Mark static const strings readonly as well. */ | |
1775 | if (TREE_CODE (base) == STRING_CST | |
1776 | && TREE_READONLY (base) | |
1777 | && TREE_STATIC (base)) | |
1778 | MEM_READONLY_P (ref) = 1; | |
1779 | ||
9eec20bf | 1780 | /* Address-space information is on the base object. */ |
3f06bd1b | 1781 | if (TREE_CODE (base) == MEM_REF |
1782 | || TREE_CODE (base) == TARGET_MEM_REF) | |
1783 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, | |
1784 | 0)))); | |
1785 | else | |
1786 | as = TYPE_ADDR_SPACE (TREE_TYPE (base)); | |
1787 | } | |
cab98a0d | 1788 | |
2b02580f | 1789 | /* If this expression uses its parent's alias set, mark it such | 
1790 | that we won't change it. */ | |
d400f5e1 | 1791 | if (component_uses_parent_alias_set_from (t) != NULL_TREE) |
5cc193e7 | 1792 | MEM_KEEP_ALIAS_SET_P (ref) = 1; |
1793 | ||
2a631e19 | 1794 | /* If this is a decl, set the attributes of the MEM from it. */ |
1795 | if (DECL_P (t)) | |
1796 | { | |
d72886b5 | 1797 | attrs.expr = t; |
6d58bcba | 1798 | attrs.offset_known_p = true; |
1799 | attrs.offset = 0; | |
6f717f77 | 1800 | apply_bitpos = bitpos; |
50ba3acc | 1801 | new_size = DECL_SIZE_UNIT (t); |
2a631e19 | 1802 | } |
1803 | ||
9eec20bf | 1804 | /* ??? If we end up with a constant here do record a MEM_EXPR. */ |
ce45a448 | 1805 | else if (CONSTANT_CLASS_P (t)) |
9eec20bf | 1806 | ; |
b10dbbca | 1807 | |
50ba3acc | 1808 | /* If this is a field reference, record it. */ |
1809 | else if (TREE_CODE (t) == COMPONENT_REF) | |
b10dbbca | 1810 | { |
d72886b5 | 1811 | attrs.expr = t; |
6d58bcba | 1812 | attrs.offset_known_p = true; |
1813 | attrs.offset = 0; | |
6f717f77 | 1814 | apply_bitpos = bitpos; |
50ba3acc | 1815 | if (DECL_BIT_FIELD (TREE_OPERAND (t, 1))) |
1816 | new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1)); | |
b10dbbca | 1817 | } |
1818 | ||
1819 | /* If this is an array reference, look for an outer field reference. */ | |
1820 | else if (TREE_CODE (t) == ARRAY_REF) | |
1821 | { | |
1822 | tree off_tree = size_zero_node; | |
6b039979 | 1823 | /* We can't modify t, because we use it at the end of the |
1824 | function. */ | |
1825 | tree t2 = t; | |
b10dbbca | 1826 | |
1827 | do | |
1828 | { | |
6b039979 | 1829 | tree index = TREE_OPERAND (t2, 1); |
6374121b | 1830 | tree low_bound = array_ref_low_bound (t2); |
1831 | tree unit_size = array_ref_element_size (t2); | |
97f8ce30 | 1832 | |
1833 | /* We assume all arrays have sizes that are a multiple of a byte. | |
1834 | First subtract the lower bound, if any, in the type of the | |
6374121b | 1835 | index, then convert to sizetype and multiply by the size of |
1836 | the array element. */ | |
1837 | if (! integer_zerop (low_bound)) | |
faa43f85 | 1838 | index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), |
1839 | index, low_bound); | |
97f8ce30 | 1840 | |
6374121b | 1841 | off_tree = size_binop (PLUS_EXPR, |
535664e3 | 1842 | size_binop (MULT_EXPR, |
1843 | fold_convert (sizetype, | |
1844 | index), | |
6374121b | 1845 | unit_size), |
1846 | off_tree); | |
6b039979 | 1847 | t2 = TREE_OPERAND (t2, 0); |
b10dbbca | 1848 | } |
6b039979 | 1849 | while (TREE_CODE (t2) == ARRAY_REF); |
b10dbbca | 1850 | |
9eec20bf | 1851 | if (DECL_P (t2) |
1852 | || TREE_CODE (t2) == COMPONENT_REF) | |
b10dbbca | 1853 | { |
d72886b5 | 1854 | attrs.expr = t2; |
6d58bcba | 1855 | attrs.offset_known_p = false; |
e913b5cd | 1856 | if (tree_fits_uhwi_p (off_tree)) |
6f717f77 | 1857 | { |
6d58bcba | 1858 | attrs.offset_known_p = true; |
e913b5cd | 1859 | attrs.offset = tree_to_uhwi (off_tree); |
6f717f77 | 1860 | apply_bitpos = bitpos; |
1861 | } | |
b10dbbca | 1862 | } |
9eec20bf | 1863 | /* Else do not record a MEM_EXPR. */ |
2d8fe5d0 | 1864 | } |
1865 | ||
6d72287b | 1866 | /* If this is an indirect reference, record it. */ |
182cf5a9 | 1867 | else if (TREE_CODE (t) == MEM_REF |
5d9de213 | 1868 | || TREE_CODE (t) == TARGET_MEM_REF) |
6d72287b | 1869 | { |
d72886b5 | 1870 | attrs.expr = t; |
6d58bcba | 1871 | attrs.offset_known_p = true; |
1872 | attrs.offset = 0; | |
6d72287b | 1873 | apply_bitpos = bitpos; |
1874 | } | |
1875 | ||
9eec20bf | 1876 | /* Compute the alignment. */ |
1877 | unsigned int obj_align; | |
1878 | unsigned HOST_WIDE_INT obj_bitpos; | |
1879 | get_object_alignment_1 (t, &obj_align, &obj_bitpos); | |
1880 | obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1); | |
1881 | if (obj_bitpos != 0) | |
1882 | obj_align = (obj_bitpos & -obj_bitpos); | |
1883 | attrs.align = MAX (attrs.align, obj_align); | |
2a631e19 | 1884 | } |
1885 | ||
e913b5cd | 1886 | if (tree_fits_uhwi_p (new_size)) |
50ba3acc | 1887 | { |
1888 | attrs.size_known_p = true; | |
e913b5cd | 1889 | attrs.size = tree_to_uhwi (new_size); |
50ba3acc | 1890 | } |
1891 | ||
e2e205b3 | 1892 | /* If we modified OFFSET based on T, then subtract the outstanding |
595f1461 | 1893 | bit position offset. Similarly, increase the size of the accessed |
1894 | object to contain the negative offset. */ | |
6f717f77 | 1895 | if (apply_bitpos) |
595f1461 | 1896 | { |
6d58bcba | 1897 | gcc_assert (attrs.offset_known_p); |
1898 | attrs.offset -= apply_bitpos / BITS_PER_UNIT; | |
1899 | if (attrs.size_known_p) | |
1900 | attrs.size += apply_bitpos / BITS_PER_UNIT; | |
595f1461 | 1901 | } |
6f717f77 | 1902 | |
2a631e19 | 1903 | /* Now set the attributes we computed above. */ |
3f06bd1b | 1904 | attrs.addrspace = as; |
d72886b5 | 1905 | set_mem_attrs (ref, &attrs); |
c6259b83 | 1906 | } |
1907 | ||
6f717f77 | 1908 | void |
35cb5232 | 1909 | set_mem_attributes (rtx ref, tree t, int objectp) |
6f717f77 | 1910 | { |
1911 | set_mem_attributes_minus_bitpos (ref, t, objectp, 0); | |
1912 | } | |
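
/* Editor's note: a standalone sketch -- not GCC code -- of the ARRAY_REF
   loop in set_mem_attributes_minus_bitpos above.  Peeling the subscripts
   from the last one inward accumulates (index - low_bound) * element_size
   at each level, which is the constant byte offset recorded in the
   attributes when all indices are known.  The sizes below are example
   assumptions.  */

#include <stdio.h>

int
main (void)
{
  /* int a[3][4], reference a[2][1]; elements are 4 bytes, each row of
     four ints is 16 bytes.  The loop sees the element subscript first.  */
  unsigned long off = 0;
  off += (1 - 0) * 4;   /* a[2][1]: index 1 within the row of ints */
  off += (2 - 0) * 16;  /* a[2]: index 2 within the array of rows */
  printf ("byte offset: %lu\n", off); /* 36 */
  return 0;
}
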
1913 | ||
c6259b83 | 1914 | /* Set the alias set of MEM to SET. */ |
1915 | ||
1916 | void | |
32c2fdea | 1917 | set_mem_alias_set (rtx mem, alias_set_type set) |
c6259b83 | 1918 | { |
d72886b5 | 1919 | struct mem_attrs attrs; |
1920 | ||
c6259b83 | 1921 | /* If the new and old alias sets don't conflict, something is wrong. */ |
1b4345f7 | 1922 | gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); |
d72886b5 | 1923 | attrs = *get_mem_attrs (mem); |
1924 | attrs.alias = set; | |
1925 | set_mem_attrs (mem, &attrs); | |
bd1a81f7 | 1926 | } |
1927 | ||
1928 | /* Set the address space of MEM to ADDRSPACE (target-defined). */ | |
1929 | ||
1930 | void | |
1931 | set_mem_addr_space (rtx mem, addr_space_t addrspace) | |
1932 | { | |
d72886b5 | 1933 | struct mem_attrs attrs; |
1934 | ||
1935 | attrs = *get_mem_attrs (mem); | |
1936 | attrs.addrspace = addrspace; | |
1937 | set_mem_attrs (mem, &attrs); | |
c6259b83 | 1938 | } |
96216d37 | 1939 | |
1c4512da | 1940 | /* Set the alignment of MEM to ALIGN bits. */ |
96216d37 | 1941 | |
1942 | void | |
35cb5232 | 1943 | set_mem_align (rtx mem, unsigned int align) |
96216d37 | 1944 | { |
d72886b5 | 1945 | struct mem_attrs attrs; |
1946 | ||
1947 | attrs = *get_mem_attrs (mem); | |
1948 | attrs.align = align; | |
1949 | set_mem_attrs (mem, &attrs); | |
96216d37 | 1950 | } |
278fe152 | 1951 | |
b10dbbca | 1952 | /* Set the expr for MEM to EXPR. */ |
278fe152 | 1953 | |
1954 | void | |
35cb5232 | 1955 | set_mem_expr (rtx mem, tree expr) |
278fe152 | 1956 | { |
d72886b5 | 1957 | struct mem_attrs attrs; |
1958 | ||
1959 | attrs = *get_mem_attrs (mem); | |
1960 | attrs.expr = expr; | |
1961 | set_mem_attrs (mem, &attrs); | |
278fe152 | 1962 | } |
b10dbbca | 1963 | |
1964 | /* Set the offset of MEM to OFFSET. */ | |
1965 | ||
1966 | void | |
da443c27 | 1967 | set_mem_offset (rtx mem, HOST_WIDE_INT offset) |
b10dbbca | 1968 | { |
d72886b5 | 1969 | struct mem_attrs attrs; |
1970 | ||
1971 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 1972 | attrs.offset_known_p = true; |
1973 | attrs.offset = offset; | |
da443c27 | 1974 | set_mem_attrs (mem, &attrs); |
1975 | } | |
1976 | ||
1977 | /* Clear the offset of MEM. */ | |
1978 | ||
1979 | void | |
1980 | clear_mem_offset (rtx mem) | |
1981 | { | |
1982 | struct mem_attrs attrs; | |
1983 | ||
1984 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 1985 | attrs.offset_known_p = false; |
d72886b5 | 1986 | set_mem_attrs (mem, &attrs); |
f0500469 | 1987 | } |
1988 | ||
1989 | /* Set the size of MEM to SIZE. */ | |
1990 | ||
1991 | void | |
5b2a69fa | 1992 | set_mem_size (rtx mem, HOST_WIDE_INT size) |
f0500469 | 1993 | { |
d72886b5 | 1994 | struct mem_attrs attrs; |
1995 | ||
1996 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 1997 | attrs.size_known_p = true; |
1998 | attrs.size = size; | |
5b2a69fa | 1999 | set_mem_attrs (mem, &attrs); |
2000 | } | |
2001 | ||
2002 | /* Clear the size of MEM. */ | |
2003 | ||
2004 | void | |
2005 | clear_mem_size (rtx mem) | |
2006 | { | |
2007 | struct mem_attrs attrs; | |
2008 | ||
2009 | attrs = *get_mem_attrs (mem); | |
6d58bcba | 2010 | attrs.size_known_p = false; |
d72886b5 | 2011 | set_mem_attrs (mem, &attrs); |
b10dbbca | 2012 | } |
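
/* Editor's note: every setter and clearer above follows the same
   copy-modify-store shape -- attrs = *get_mem_attrs (mem); change one
   field; set_mem_attrs (mem, &attrs) -- because attribute blocks are
   shared between MEMs and must never be updated in place.  A minimal
   standalone sketch of that discipline, with invented stand-in types
   (this is not the GCC API):  */

#include <stdio.h>

struct attrs_sketch { unsigned int align; long offset; };

int
main (void)
{
  struct attrs_sketch shared = { 8, 0 };
  struct attrs_sketch *mem_a = &shared, *mem_b = &shared;

  /* Wrong: mem_a->align = 32 would silently change mem_b as well.
     Right: copy the block, modify the copy, repoint mem_a at it.  */
  struct attrs_sketch copy = *mem_a;
  copy.align = 32;
  mem_a = &copy;

  printf ("a: %u, b: %u\n", mem_a->align, mem_b->align); /* 32, 8 */
  return 0;
}
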
c6259b83 | 2013 | \f |
96216d37 | 2014 | /* Return a memory reference like MEMREF, but with its mode changed to MODE |
2015 | and its address changed to ADDR. (VOIDmode means don't change the mode. | |
2016 | NULL for ADDR means don't change the address.) VALIDATE is nonzero if the | |
5cc04e45 | 2017 | returned memory location is required to be valid. INPLACE is true if any |
2018 | changes can be made directly to MEMREF or false if MEMREF must be treated | |
2019 | as immutable. | |
2020 | ||
2021 | The memory attributes are not changed. */ | |
15bbde2b | 2022 | |
96216d37 | 2023 | static rtx |
5cc04e45 | 2024 | change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate, |
2025 | bool inplace) | |
15bbde2b | 2026 | { |
bd1a81f7 | 2027 | addr_space_t as; |
9ce37fa7 | 2028 | rtx new_rtx; |
15bbde2b | 2029 | |
611234b4 | 2030 | gcc_assert (MEM_P (memref)); |
bd1a81f7 | 2031 | as = MEM_ADDR_SPACE (memref); |
15bbde2b | 2032 | if (mode == VOIDmode) |
2033 | mode = GET_MODE (memref); | |
2034 | if (addr == 0) | |
2035 | addr = XEXP (memref, 0); | |
3988ef8b | 2036 | if (mode == GET_MODE (memref) && addr == XEXP (memref, 0) |
bd1a81f7 | 2037 | && (!validate || memory_address_addr_space_p (mode, addr, as))) |
3988ef8b | 2038 | return memref; |
15bbde2b | 2039 | |
73a18f44 | 2040 | /* Don't validate address for LRA. LRA can make the address valid |
2041 | by itself in the most efficient way. */ | 
2042 | if (validate && !lra_in_progress) | |
15bbde2b | 2043 | { |
e4e86ec5 | 2044 | if (reload_in_progress || reload_completed) |
bd1a81f7 | 2045 | gcc_assert (memory_address_addr_space_p (mode, addr, as)); |
e4e86ec5 | 2046 | else |
bd1a81f7 | 2047 | addr = memory_address_addr_space (mode, addr, as); |
15bbde2b | 2048 | } |
d823ba47 | 2049 | |
e8976cd7 | 2050 | if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref)) |
2051 | return memref; | |
2052 | ||
5cc04e45 | 2053 | if (inplace) |
2054 | { | |
2055 | XEXP (memref, 0) = addr; | |
2056 | return memref; | |
2057 | } | |
2058 | ||
9ce37fa7 | 2059 | new_rtx = gen_rtx_MEM (mode, addr); |
2060 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
2061 | return new_rtx; | |
15bbde2b | 2062 | } |
537ffcfc | 2063 | |
96216d37 | 2064 | /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what |
2065 | way we are changing MEMREF, so we only preserve the alias set. */ | |
e513d163 | 2066 | |
2067 | rtx | |
35cb5232 | 2068 | change_address (rtx memref, enum machine_mode mode, rtx addr) |
e513d163 | 2069 | { |
5cc04e45 | 2070 | rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); |
9ce37fa7 | 2071 | enum machine_mode mmode = GET_MODE (new_rtx); |
d72886b5 | 2072 | struct mem_attrs attrs, *defattrs; |
0ab96142 | 2073 | |
d72886b5 | 2074 | attrs = *get_mem_attrs (memref); |
2075 | defattrs = mode_mem_attrs[(int) mmode]; | |
6d58bcba | 2076 | attrs.expr = NULL_TREE; |
2077 | attrs.offset_known_p = false; | |
2078 | attrs.size_known_p = defattrs->size_known_p; | |
d72886b5 | 2079 | attrs.size = defattrs->size; |
2080 | attrs.align = defattrs->align; | |
6cc60c4d | 2081 | |
d28edf0d | 2082 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2083 | if (new_rtx == memref) |
0ab96142 | 2084 | { |
d72886b5 | 2085 | if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs)) |
9ce37fa7 | 2086 | return new_rtx; |
0ab96142 | 2087 | |
9ce37fa7 | 2088 | new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0)); |
2089 | MEM_COPY_ATTRIBUTES (new_rtx, memref); | |
0ab96142 | 2090 | } |
d28edf0d | 2091 | |
d72886b5 | 2092 | set_mem_attrs (new_rtx, &attrs); |
9ce37fa7 | 2093 | return new_rtx; |
e513d163 | 2094 | } |
537ffcfc | 2095 | |
96216d37 | 2096 | /* Return a memory reference like MEMREF, but with its mode changed |
2097 | to MODE and its address offset by OFFSET bytes. If VALIDATE is | |
bf42c62d | 2098 | nonzero, the memory address is forced to be valid. |
2d0fd66d | 2099 | If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS |
2100 | and the caller is responsible for adjusting the MEMREF base register. | 
2101 | If ADJUST_OBJECT is zero, the underlying object associated with the | |
2102 | memory reference is left unchanged and the caller is responsible for | |
2103 | dealing with it. Otherwise, if the new memory reference is outside | |
226c6baf | 2104 | the underlying object, even partially, then the object is dropped. |
2105 | SIZE, if nonzero, is the size of an access in cases where MODE | |
2106 | has no inherent size. */ | |
e4e86ec5 | 2107 | |
2108 | rtx | |
35cb5232 | 2109 | adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset, |
226c6baf | 2110 | int validate, int adjust_address, int adjust_object, |
2111 | HOST_WIDE_INT size) | |
e4e86ec5 | 2112 | { |
fb257ae6 | 2113 | rtx addr = XEXP (memref, 0); |
9ce37fa7 | 2114 | rtx new_rtx; |
d72886b5 | 2115 | enum machine_mode address_mode; |
cfb75cdf | 2116 | int pbits; |
21b8bc7e | 2117 | struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs; |
d72886b5 | 2118 | unsigned HOST_WIDE_INT max_align; |
21b8bc7e | 2119 | #ifdef POINTERS_EXTEND_UNSIGNED |
2120 | enum machine_mode pointer_mode | |
2121 | = targetm.addr_space.pointer_mode (attrs.addrspace); | |
2122 | #endif | |
fb257ae6 | 2123 | |
4733f549 | 2124 | /* VOIDmode means no mode change for change_address_1. */ |
2125 | if (mode == VOIDmode) | |
2126 | mode = GET_MODE (memref); | |
2127 | ||
226c6baf | 2128 | /* Take the size of non-BLKmode accesses from the mode. */ |
2129 | defattrs = mode_mem_attrs[(int) mode]; | |
2130 | if (defattrs->size_known_p) | |
2131 | size = defattrs->size; | |
2132 | ||
d28edf0d | 2133 | /* If there are no changes, just return the original memory reference. */ |
2134 | if (mode == GET_MODE (memref) && !offset | |
226c6baf | 2135 | && (size == 0 || (attrs.size_known_p && attrs.size == size)) |
d72886b5 | 2136 | && (!validate || memory_address_addr_space_p (mode, addr, |
2137 | attrs.addrspace))) | |
d28edf0d | 2138 | return memref; |
2139 | ||
e36c3d58 | 2140 | /* ??? Prefer to create garbage instead of creating shared rtl. |
6ef828f9 | 2141 | This may happen even if offset is nonzero -- consider |
e36c3d58 | 2142 | (plus (plus reg reg) const_int) -- so do this always. */ |
2143 | addr = copy_rtx (addr); | |
2144 | ||
cfb75cdf | 2145 | /* Convert a possibly large offset to a signed value within the |
2146 | range of the target address space. */ | |
87cf5753 | 2147 | address_mode = get_address_mode (memref); |
98155838 | 2148 | pbits = GET_MODE_BITSIZE (address_mode); |
cfb75cdf | 2149 | if (HOST_BITS_PER_WIDE_INT > pbits) |
2150 | { | |
2151 | int shift = HOST_BITS_PER_WIDE_INT - pbits; | |
2152 | offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift)) | |
2153 | >> shift); | |
2154 | } | |
2155 | ||
2d0fd66d | 2156 | if (adjust_address) |
cd358719 | 2157 | { |
2158 | /* If MEMREF is a LO_SUM and the offset is within the alignment of the | |
2159 | object, we can merge it into the LO_SUM. */ | |
2160 | if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM | |
2161 | && offset >= 0 | |
2162 | && (unsigned HOST_WIDE_INT) offset | |
2163 | < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) | |
98155838 | 2164 | addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), |
29c05e22 | 2165 | plus_constant (address_mode, |
2166 | XEXP (addr, 1), offset)); | |
21b8bc7e | 2167 | #ifdef POINTERS_EXTEND_UNSIGNED |
2168 | /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid | |
2169 | in that mode, we merge it into the ZERO_EXTEND. We take advantage of | |
2170 | the fact that pointers are not allowed to overflow. */ | |
2171 | else if (POINTERS_EXTEND_UNSIGNED > 0 | |
2172 | && GET_CODE (addr) == ZERO_EXTEND | |
2173 | && GET_MODE (XEXP (addr, 0)) == pointer_mode | |
2174 | && trunc_int_for_mode (offset, pointer_mode) == offset) | |
2175 | addr = gen_rtx_ZERO_EXTEND (address_mode, | |
2176 | plus_constant (pointer_mode, | |
2177 | XEXP (addr, 0), offset)); | |
2178 | #endif | |
cd358719 | 2179 | else |
29c05e22 | 2180 | addr = plus_constant (address_mode, addr, offset); |
cd358719 | 2181 | } |
fb257ae6 | 2182 | |
5cc04e45 | 2183 | new_rtx = change_address_1 (memref, mode, addr, validate, false); |
96216d37 | 2184 | |
e077413c | 2185 | /* If the address is a REG, change_address_1 rightfully returns memref, |
2186 | but this would destroy memref's MEM_ATTRS. */ | |
2187 | if (new_rtx == memref && offset != 0) | |
2188 | new_rtx = copy_rtx (new_rtx); | |
2189 | ||
2d0fd66d | 2190 | /* Conservatively drop the object if we don't know where we start from. */ |
2191 | if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p)) | |
2192 | { | |
2193 | attrs.expr = NULL_TREE; | |
2194 | attrs.alias = 0; | |
2195 | } | |
2196 | ||
96216d37 | 2197 | /* Compute the new values of the memory attributes due to this adjustment. |
2198 | We add the offsets and update the alignment. */ | |
6d58bcba | 2199 | if (attrs.offset_known_p) |
2d0fd66d | 2200 | { |
2201 | attrs.offset += offset; | |
2202 | ||
2203 | /* Drop the object if the new left end is not within its bounds. */ | |
2204 | if (adjust_object && attrs.offset < 0) | |
2205 | { | |
2206 | attrs.expr = NULL_TREE; | |
2207 | attrs.alias = 0; | |
2208 | } | |
2209 | } | |
96216d37 | 2210 | |
b8098e5b | 2211 | /* Compute the new alignment by taking the MIN of the alignment and the |
2212 | lowest-order set bit in OFFSET, but don't change the alignment if OFFSET | |
2213 | is zero. */ | 
2214 | if (offset != 0) | |
d72886b5 | 2215 | { |
2216 | max_align = (offset & -offset) * BITS_PER_UNIT; | |
2217 | attrs.align = MIN (attrs.align, max_align); | |
2218 | } | |
96216d37 | 2219 | |
226c6baf | 2220 | if (size) |
6d58bcba | 2221 | { |
2d0fd66d | 2222 | /* Drop the object if the new right end is not within its bounds. */ |
226c6baf | 2223 | if (adjust_object && (offset + size) > attrs.size) |
2d0fd66d | 2224 | { |
2225 | attrs.expr = NULL_TREE; | |
2226 | attrs.alias = 0; | |
2227 | } | |
6d58bcba | 2228 | attrs.size_known_p = true; |
226c6baf | 2229 | attrs.size = size; |
6d58bcba | 2230 | } |
2231 | else if (attrs.size_known_p) | |
2d0fd66d | 2232 | { |
226c6baf | 2233 | gcc_assert (!adjust_object); |
2d0fd66d | 2234 | attrs.size -= offset; |
226c6baf | 2235 | /* ??? The store_by_pieces machinery generates negative sizes, |
2236 | so don't assert for that here. */ | |
2d0fd66d | 2237 | } |
5cc193e7 | 2238 | |
d72886b5 | 2239 | set_mem_attrs (new_rtx, &attrs); |
96216d37 | 2240 | |
9ce37fa7 | 2241 | return new_rtx; |
e4e86ec5 | 2242 | } |
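
/* Editor's note: a standalone demo -- not GCC code -- of two bit tricks
   used in adjust_address_1 above.  Shifting left and then arithmetically
   right sign-extends a host-wide offset into the range of a narrower
   target address space, and (offset & -offset) isolates the lowest set
   bit, which bounds the alignment that survives adding OFFSET.  Assumes
   a 64-bit host wide int, 8-bit units, and an arithmetic right shift of
   signed values (as GCC's own hosts provide).  */

#include <stdio.h>
#include <stdint.h>

static int64_t
truncate_to_pbits (int64_t offset, int pbits)
{
  int shift = 64 - pbits;
  return (int64_t) ((uint64_t) offset << shift) >> shift;
}

int
main (void)
{
  /* With 32-bit addresses, an offset of 0x100000004 wraps to 4.  */
  printf ("%lld\n", (long long) truncate_to_pbits (0x100000004LL, 32));

  /* Offset 24 = 0b11000: lowest set bit is 8, so at most 8-byte
     (64-bit) alignment can be assumed after the adjustment.  */
  int64_t offset = 24;
  printf ("%lld bits\n", (long long) ((offset & -offset) * 8)); /* 64 */
  return 0;
}
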
2243 | ||
bf42c62d | 2244 | /* Return a memory reference like MEMREF, but with its mode changed |
2245 | to MODE and its address changed to ADDR, which is assumed to be | |
f0b5f617 | 2246 | MEMREF offset by OFFSET bytes. If VALIDATE is |
bf42c62d | 2247 | nonzero, the memory address is forced to be valid. */ |
2248 | ||
2249 | rtx | |
35cb5232 | 2250 | adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr, |
2251 | HOST_WIDE_INT offset, int validate) | |
bf42c62d | 2252 | { |
5cc04e45 | 2253 | memref = change_address_1 (memref, VOIDmode, addr, validate, false); |
226c6baf | 2254 | return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); |
bf42c62d | 2255 | } |
2256 | ||
2a631e19 | 2257 | /* Return a memory reference like MEMREF, but whose address is changed by |
2258 | adding OFFSET, an RTX, to it. POW2 is the highest power of two factor | |
2259 | known to be in OFFSET (possibly 1). */ | |
fcdc122e | 2260 | |
2261 | rtx | |
35cb5232 | 2262 | offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) |
fcdc122e | 2263 | { |
9ce37fa7 | 2264 | rtx new_rtx, addr = XEXP (memref, 0); |
d72886b5 | 2265 | enum machine_mode address_mode; |
6d58bcba | 2266 | struct mem_attrs attrs, *defattrs; |
fac6aae6 | 2267 | |
d72886b5 | 2268 | attrs = *get_mem_attrs (memref); |
87cf5753 | 2269 | address_mode = get_address_mode (memref); |
98155838 | 2270 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
fac6aae6 | 2271 | |
d4c5e26d | 2272 | /* At this point we don't know _why_ the address is invalid. It |
917bbcab | 2273 | could have secondary memory references, multiplies or anything. |
fac6aae6 | 2274 | |
2275 | However, if we did go and rearrange things, we can wind up not | |
2276 | being able to recognize the magic around pic_offset_table_rtx. | |
2277 | This stuff is fragile, and is yet another example of why it is | |
2278 | bad to expose PIC machinery too early. */ | |
d72886b5 | 2279 | if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, |
2280 | attrs.addrspace) | |
fac6aae6 | 2281 | && GET_CODE (addr) == PLUS |
2282 | && XEXP (addr, 0) == pic_offset_table_rtx) | |
2283 | { | |
2284 | addr = force_reg (GET_MODE (addr), addr); | |
98155838 | 2285 | new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
fac6aae6 | 2286 | } |
2287 | ||
9ce37fa7 | 2288 | update_temp_slot_address (XEXP (memref, 0), new_rtx); |
5cc04e45 | 2289 | new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false); |
fcdc122e | 2290 | |
d28edf0d | 2291 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2292 | if (new_rtx == memref) |
2293 | return new_rtx; | |
d28edf0d | 2294 | |
fcdc122e | 2295 | /* Update the alignment to reflect the offset. Reset the offset, which |
2296 | we don't know. */ | |
6d58bcba | 2297 | defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)]; |
2298 | attrs.offset_known_p = false; | |
2299 | attrs.size_known_p = defattrs->size_known_p; | |
2300 | attrs.size = defattrs->size; | |
d72886b5 | 2301 | attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT); |
2302 | set_mem_attrs (new_rtx, &attrs); | |
9ce37fa7 | 2303 | return new_rtx; |
fcdc122e | 2304 | } |
d4c5e26d | 2305 | |
537ffcfc | 2306 | /* Return a memory reference like MEMREF, but with its address changed to |
2307 | ADDR. The caller is asserting that the actual piece of memory pointed | |
2308 | to is the same, just the form of the address is being changed, such as | |
5cc04e45 | 2309 | by putting something into a register. INPLACE is true if any changes |
2310 | can be made directly to MEMREF or false if MEMREF must be treated as | |
2311 | immutable. */ | |
537ffcfc | 2312 | |
2313 | rtx | |
5cc04e45 | 2314 | replace_equiv_address (rtx memref, rtx addr, bool inplace) |
537ffcfc | 2315 | { |
96216d37 | 2316 | /* change_address_1 copies the memory attribute structure without change |
2317 | and that's exactly what we want here. */ | |
ecfe4ca9 | 2318 | update_temp_slot_address (XEXP (memref, 0), addr); |
5cc04e45 | 2319 | return change_address_1 (memref, VOIDmode, addr, 1, inplace); |
537ffcfc | 2320 | } |
96216d37 | 2321 | |
e4e86ec5 | 2322 | /* Likewise, but the reference is not required to be valid. */ |
2323 | ||
2324 | rtx | |
5cc04e45 | 2325 | replace_equiv_address_nv (rtx memref, rtx addr, bool inplace) |
e4e86ec5 | 2326 | { |
5cc04e45 | 2327 | return change_address_1 (memref, VOIDmode, addr, 0, inplace); |
e4e86ec5 | 2328 | } |
8259ab07 | 2329 | |
2330 | /* Return a memory reference like MEMREF, but with its mode widened to | |
2331 | MODE and offset by OFFSET. This would be used by targets that e.g. | |
2332 | cannot issue QImode memory operations and have to use SImode memory | |
2333 | operations plus masking logic. */ | |
2334 | ||
2335 | rtx | |
35cb5232 | 2336 | widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset) |
8259ab07 | 2337 | { |
226c6baf | 2338 | rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); |
d72886b5 | 2339 | struct mem_attrs attrs; |
8259ab07 | 2340 | unsigned int size = GET_MODE_SIZE (mode); |
2341 | ||
d28edf0d | 2342 | /* If there are no changes, just return the original memory reference. */ |
9ce37fa7 | 2343 | if (new_rtx == memref) |
2344 | return new_rtx; | |
d28edf0d | 2345 | |
d72886b5 | 2346 | attrs = *get_mem_attrs (new_rtx); |
2347 | ||
8259ab07 | 2348 | /* If we don't know what offset we were at within the expression, then |
2349 | we can't know if we've overstepped the bounds. */ | |
6d58bcba | 2350 | if (! attrs.offset_known_p) |
d72886b5 | 2351 | attrs.expr = NULL_TREE; |
8259ab07 | 2352 | |
d72886b5 | 2353 | while (attrs.expr) |
8259ab07 | 2354 | { |
d72886b5 | 2355 | if (TREE_CODE (attrs.expr) == COMPONENT_REF) |
8259ab07 | 2356 | { |
d72886b5 | 2357 | tree field = TREE_OPERAND (attrs.expr, 1); |
2358 | tree offset = component_ref_field_offset (attrs.expr); | |
8259ab07 | 2359 | |
2360 | if (! DECL_SIZE_UNIT (field)) | |
2361 | { | |
d72886b5 | 2362 | attrs.expr = NULL_TREE; |
8259ab07 | 2363 | break; |
2364 | } | |
2365 | ||
2366 | /* Is the field at least as large as the access? If so, ok, | |
2367 | otherwise strip back to the containing structure. */ | |
8359cfb4 | 2368 | if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST |
2369 | && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0 | |
6d58bcba | 2370 | && attrs.offset >= 0) |
8259ab07 | 2371 | break; |
2372 | ||
e913b5cd | 2373 | if (! tree_fits_uhwi_p (offset)) |
8259ab07 | 2374 | { |
d72886b5 | 2375 | attrs.expr = NULL_TREE; |
8259ab07 | 2376 | break; |
2377 | } | |
2378 | ||
d72886b5 | 2379 | attrs.expr = TREE_OPERAND (attrs.expr, 0); |
e913b5cd | 2380 | attrs.offset += tree_to_uhwi (offset); |
2381 | attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | |
6d58bcba | 2382 | / BITS_PER_UNIT); |
8259ab07 | 2383 | } |
2384 | /* Similarly for the decl. */ | |
d72886b5 | 2385 | else if (DECL_P (attrs.expr) |
2386 | && DECL_SIZE_UNIT (attrs.expr) | |
2387 | && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST | |
2388 | && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0 | |
6d58bcba | 2389 | && (! attrs.offset_known_p || attrs.offset >= 0)) |
8259ab07 | 2390 | break; |
2391 | else | |
2392 | { | |
2393 | /* The widened memory access overflows the expression, which means | |
2394 | that it could alias another expression. Zap it. */ | |
d72886b5 | 2395 | attrs.expr = NULL_TREE; |
8259ab07 | 2396 | break; |
2397 | } | |
2398 | } | |
2399 | ||
d72886b5 | 2400 | if (! attrs.expr) |
6d58bcba | 2401 | attrs.offset_known_p = false; |
8259ab07 | 2402 | |
2403 | /* The widened memory may alias other stuff, so zap the alias set. */ | |
2404 | /* ??? Maybe use get_alias_set on any remaining expression. */ | |
d72886b5 | 2405 | attrs.alias = 0; |
6d58bcba | 2406 | attrs.size_known_p = true; |
2407 | attrs.size = size; | |
d72886b5 | 2408 | set_mem_attrs (new_rtx, &attrs); |
9ce37fa7 | 2409 | return new_rtx; |
8259ab07 | 2410 | } |
15bbde2b | 2411 | \f |
ac681e84 | 2412 | /* A fake decl that is used as the MEM_EXPR of spill slots. */ |
2413 | static GTY(()) tree spill_slot_decl; | |
2414 | ||
58029e61 | 2415 | tree |
2416 | get_spill_slot_decl (bool force_build_p) | |
ac681e84 | 2417 | { |
2418 | tree d = spill_slot_decl; | |
2419 | rtx rd; | |
d72886b5 | 2420 | struct mem_attrs attrs; |
ac681e84 | 2421 | |
58029e61 | 2422 | if (d || !force_build_p) |
ac681e84 | 2423 | return d; |
2424 | ||
e60a6f7b | 2425 | d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
2426 | VAR_DECL, get_identifier ("%sfp"), void_type_node); | |
ac681e84 | 2427 | DECL_ARTIFICIAL (d) = 1; |
2428 | DECL_IGNORED_P (d) = 1; | |
2429 | TREE_USED (d) = 1; | |
ac681e84 | 2430 | spill_slot_decl = d; |
2431 | ||
2432 | rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); | |
2433 | MEM_NOTRAP_P (rd) = 1; | |
d72886b5 | 2434 | attrs = *mode_mem_attrs[(int) BLKmode]; |
2435 | attrs.alias = new_alias_set (); | |
2436 | attrs.expr = d; | |
2437 | set_mem_attrs (rd, &attrs); | |
ac681e84 | 2438 | SET_DECL_RTL (d, rd); |
2439 | ||
2440 | return d; | |
2441 | } | |
2442 | ||
2443 | /* Given MEM, a result from assign_stack_local, fill in the memory | |
2444 | attributes as appropriate for a register allocator spill slot. | |
2445 | These slots are not aliasable by other memory. We arrange for | |
2446 | them all to use a single MEM_EXPR, so that the aliasing code can | |
2447 | work properly in the case of shared spill slots. */ | |
2448 | ||
2449 | void | |
2450 | set_mem_attrs_for_spill (rtx mem) | |
2451 | { | |
d72886b5 | 2452 | struct mem_attrs attrs; |
2453 | rtx addr; | |
ac681e84 | 2454 | |
d72886b5 | 2455 | attrs = *get_mem_attrs (mem); |
2456 | attrs.expr = get_spill_slot_decl (true); | |
2457 | attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); | |
2458 | attrs.addrspace = ADDR_SPACE_GENERIC; | |
ac681e84 | 2459 | |
2460 | /* We expect the incoming memory to be of the form: | |
2461 | (mem:MODE (plus (reg sfp) (const_int offset))) | |
2462 | with perhaps the plus missing for offset = 0. */ | |
2463 | addr = XEXP (mem, 0); | |
6d58bcba | 2464 | attrs.offset_known_p = true; |
2465 | attrs.offset = 0; | |
ac681e84 | 2466 | if (GET_CODE (addr) == PLUS |
971ba038 | 2467 | && CONST_INT_P (XEXP (addr, 1))) |
6d58bcba | 2468 | attrs.offset = INTVAL (XEXP (addr, 1)); |
ac681e84 | 2469 | |
d72886b5 | 2470 | set_mem_attrs (mem, &attrs); |
ac681e84 | 2471 | MEM_NOTRAP_P (mem) = 1; |
2472 | } | |
2473 | \f | |
15bbde2b | 2474 | /* Return a newly created CODE_LABEL rtx with a unique label number. */ |
2475 | ||
be95c7c7 | 2476 | rtx_code_label * |
35cb5232 | 2477 | gen_label_rtx (void) |
15bbde2b | 2478 | { |
be95c7c7 | 2479 | return as_a <rtx_code_label *> ( |
2480 | gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX, | |
2481 | NULL, label_num++, NULL)); | |
15bbde2b | 2482 | } |
2483 | \f | |
2484 | /* For procedure integration. */ | |
2485 | ||
15bbde2b | 2486 | /* Install new pointers to the first and last insns in the chain. |
d4c332ff | 2487 | Also, set cur_insn_uid to one higher than the last in use. |
15bbde2b | 2488 | Used for an inline-procedure after copying the insn chain. */ |
2489 | ||
2490 | void | |
35cb5232 | 2491 | set_new_first_and_last_insn (rtx first, rtx last) |
15bbde2b | 2492 | { |
d4c332ff | 2493 | rtx insn; |
2494 | ||
06f9d6ef | 2495 | set_first_insn (first); |
2496 | set_last_insn (last); | |
d4c332ff | 2497 | cur_insn_uid = 0; |
2498 | ||
9845d120 | 2499 | if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS) |
2500 | { | |
2501 | int debug_count = 0; | |
2502 | ||
2503 | cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1; | |
2504 | cur_debug_insn_uid = 0; | |
2505 | ||
2506 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2507 | if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID) | |
2508 | cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn)); | |
2509 | else | |
2510 | { | |
2511 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
2512 | if (DEBUG_INSN_P (insn)) | |
2513 | debug_count++; | |
2514 | } | |
2515 | ||
2516 | if (debug_count) | |
2517 | cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count; | |
2518 | else | |
2519 | cur_debug_insn_uid++; | |
2520 | } | |
2521 | else | |
2522 | for (insn = first; insn; insn = NEXT_INSN (insn)) | |
2523 | cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn)); | |
d4c332ff | 2524 | |
2525 | cur_insn_uid++; | |
15bbde2b | 2526 | } |
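
/* Editor's note: a simplified standalone sketch -- not GCC code -- of the
   common case in set_new_first_and_last_insn above: after splicing in a
   new insn chain, the next UID to hand out is one past the largest UID
   found by walking the chain.  (The debug-insn path additionally keeps a
   separate counter for UIDs below MIN_NONDEBUG_INSN_UID, which this
   sketch omits.)  */

#include <stdio.h>

struct insn_sketch { int uid; struct insn_sketch *next; };

static int
recount_next_uid (struct insn_sketch *first)
{
  int cur = 0;
  for (struct insn_sketch *i = first; i; i = i->next)
    if (i->uid > cur)
      cur = i->uid;
  return cur + 1;
}

int
main (void)
{
  struct insn_sketch c = { 12, NULL }, b = { 40, &c }, a = { 7, &b };
  printf ("%d\n", recount_next_uid (&a)); /* 41 */
  return 0;
}
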
15bbde2b | 2527 | \f |
d823ba47 | 2528 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2529 | structure. This routine should only be called once. */ |
15bbde2b | 2530 | |
a40c0eeb | 2531 | static void |
58945f46 | 2532 | unshare_all_rtl_1 (rtx_insn *insn) |
15bbde2b | 2533 | { |
2d96a59a | 2534 | /* Unshare just about everything else. */ |
1cd4cfea | 2535 | unshare_all_rtl_in_chain (insn); |
d823ba47 | 2536 | |
15bbde2b | 2537 | /* Make sure the addresses of stack slots found outside the insn chain |
2538 | (such as, in DECL_RTL of a variable) are not shared | |
2539 | with the insn chain. | |
2540 | ||
2541 | This special care is necessary when the stack slot MEM does not | |
2542 | actually appear in the insn chain. If it does appear, its address | |
2543 | is unshared from all else at that point. */ | |
45733446 | 2544 | stack_slot_list = copy_rtx_if_shared (stack_slot_list); |
15bbde2b | 2545 | } |
2546 | ||
d823ba47 | 2547 | /* Go through all the RTL insn bodies and copy any invalid shared |
2d96a59a | 2548 | structure, again. This is a fairly expensive thing to do so it |
2549 | should be done sparingly. */ | |
2550 | ||
2551 | void | |
58945f46 | 2552 | unshare_all_rtl_again (rtx_insn *insn) |
2d96a59a | 2553 | { |
58945f46 | 2554 | rtx_insn *p; |
5244079b | 2555 | tree decl; |
2556 | ||
2d96a59a | 2557 | for (p = insn; p; p = NEXT_INSN (p)) |
9204e736 | 2558 | if (INSN_P (p)) |
2d96a59a | 2559 | { |
2560 | reset_used_flags (PATTERN (p)); | |
2561 | reset_used_flags (REG_NOTES (p)); | |
6d2a4bac | 2562 | if (CALL_P (p)) |
2563 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (p)); | |
2d96a59a | 2564 | } |
5244079b | 2565 | |
01dc9f0c | 2566 | /* Make sure that virtual stack slots are not shared. */ |
265be050 | 2567 | set_used_decls (DECL_INITIAL (cfun->decl)); |
01dc9f0c | 2568 | |
5244079b | 2569 | /* Make sure that virtual parameters are not shared. */ |
1767a056 | 2570 | for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl)) |
265be050 | 2571 | set_used_flags (DECL_RTL (decl)); |
5244079b | 2572 | |
2573 | reset_used_flags (stack_slot_list); | |
2574 | ||
df329266 | 2575 | unshare_all_rtl_1 (insn); |
a40c0eeb | 2576 | } |
2577 | ||
2a1990e9 | 2578 | unsigned int |
a40c0eeb | 2579 | unshare_all_rtl (void) |
2580 | { | |
df329266 | 2581 | unshare_all_rtl_1 (get_insns ()); |
2a1990e9 | 2582 | return 0; |
2d96a59a | 2583 | } |
2584 | ||
77fce4cd | 2585 | |
1cd4cfea | 2586 | /* Check that ORIG is not marked when it should not be, and mark ORIG as in use. 
2587 | Recursively does the same for subexpressions. */ | 
2588 | ||
2589 | static void | |
2590 | verify_rtx_sharing (rtx orig, rtx insn) | |
2591 | { | |
2592 | rtx x = orig; | |
2593 | int i; | |
2594 | enum rtx_code code; | |
2595 | const char *format_ptr; | |
2596 | ||
2597 | if (x == 0) | |
2598 | return; | |
2599 | ||
2600 | code = GET_CODE (x); | |
2601 | ||
2602 | /* These types may be freely shared. */ | |
2603 | ||
2604 | switch (code) | |
2605 | { | |
2606 | case REG: | |
688ff29b | 2607 | case DEBUG_EXPR: |
2608 | case VALUE: | |
0349edce | 2609 | CASE_CONST_ANY: |
1cd4cfea | 2610 | case SYMBOL_REF: |
2611 | case LABEL_REF: | |
2612 | case CODE_LABEL: | |
2613 | case PC: | |
2614 | case CC0: | |
1a860023 | 2615 | case RETURN: |
9cb2517e | 2616 | case SIMPLE_RETURN: |
1cd4cfea | 2617 | case SCRATCH: |
c09425a0 | 2618 | /* SCRATCHes must be shared because each one represents a distinct value. */ | 
b291008a | 2619 | return; |
c09425a0 | 2620 | case CLOBBER: |
b291008a | 2621 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2622 | clobbers or clobbers of hard registers that originated as pseudos. | |
2623 | This is needed to allow safe register renaming. */ | |
2624 | if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER | |
2625 | && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0))) | |
c09425a0 | 2626 | return; |
2627 | break; | |
1cd4cfea | 2628 | |
2629 | case CONST: | |
3072d30e | 2630 | if (shared_const_p (orig)) |
1cd4cfea | 2631 | return; |
2632 | break; | |
2633 | ||
2634 | case MEM: | |
2635 | /* A MEM is allowed to be shared if its address is constant. */ | |
2636 | if (CONSTANT_ADDRESS_P (XEXP (x, 0)) | |
2637 | || reload_completed || reload_in_progress) | |
2638 | return; | |
2639 | ||
2640 | break; | |
2641 | ||
2642 | default: | |
2643 | break; | |
2644 | } | |
2645 | ||
2646 | /* This rtx may not be shared. If it has already been seen, | |
2647 | replace it with a copy of itself. */ | |
9cee7c3f | 2648 | #ifdef ENABLE_CHECKING |
1cd4cfea | 2649 | if (RTX_FLAG (x, used)) |
2650 | { | |
0a81f5a0 | 2651 | error ("invalid rtl sharing found in the insn"); |
1cd4cfea | 2652 | debug_rtx (insn); |
0a81f5a0 | 2653 | error ("shared rtx"); |
1cd4cfea | 2654 | debug_rtx (x); |
0a81f5a0 | 2655 | internal_error ("internal consistency failure"); |
1cd4cfea | 2656 | } |
9cee7c3f | 2657 | #endif |
2658 | gcc_assert (!RTX_FLAG (x, used)); | |
48e1416a | 2659 | |
1cd4cfea | 2660 | RTX_FLAG (x, used) = 1; |
2661 | ||
8b332087 | 2662 | /* Now scan the subexpressions recursively. */ |
1cd4cfea | 2663 | |
2664 | format_ptr = GET_RTX_FORMAT (code); | |
2665 | ||
2666 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
2667 | { | |
2668 | switch (*format_ptr++) | |
2669 | { | |
2670 | case 'e': | |
2671 | verify_rtx_sharing (XEXP (x, i), insn); | |
2672 | break; | |
2673 | ||
2674 | case 'E': | |
2675 | if (XVEC (x, i) != NULL) | |
2676 | { | |
2677 | int j; | |
2678 | int len = XVECLEN (x, i); | |
2679 | ||
2680 | for (j = 0; j < len; j++) | |
2681 | { | |
9cee7c3f | 2682 | /* We allow sharing of ASM_OPERANDS inside a single 
2683 | instruction. */ | 
1cd4cfea | 2684 | if (j && GET_CODE (XVECEXP (x, i, j)) == SET |
9cee7c3f | 2685 | && (GET_CODE (SET_SRC (XVECEXP (x, i, j))) |
2686 | == ASM_OPERANDS)) | |
1cd4cfea | 2687 | verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn); |
2688 | else | |
2689 | verify_rtx_sharing (XVECEXP (x, i, j), insn); | |
2690 | } | |
2691 | } | |
2692 | break; | |
2693 | } | |
2694 | } | |
2695 | return; | |
2696 | } | |
2697 | ||
1e9af25c | 2698 | /* Reset used-flags for INSN. */ |
2699 | ||
2700 | static void | |
2701 | reset_insn_used_flags (rtx insn) | |
2702 | { | |
2703 | gcc_assert (INSN_P (insn)); | |
2704 | reset_used_flags (PATTERN (insn)); | |
2705 | reset_used_flags (REG_NOTES (insn)); | |
2706 | if (CALL_P (insn)) | |
2707 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn)); | |
2708 | } | |
2709 | ||
7cdd84a2 | 2710 | /* Go through all the RTL insn bodies and clear all the USED bits. */ |
1cd4cfea | 2711 | |
7cdd84a2 | 2712 | static void |
2713 | reset_all_used_flags (void) | |
1cd4cfea | 2714 | { |
2715 | rtx p; | |
2716 | ||
2717 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2718 | if (INSN_P (p)) | |
2719 | { | |
1e9af25c | 2720 | rtx pat = PATTERN (p); |
2721 | if (GET_CODE (pat) != SEQUENCE) | |
2722 | reset_insn_used_flags (p); | |
2723 | else | |
764f640f | 2724 | { |
1e9af25c | 2725 | gcc_assert (REG_NOTES (p) == NULL); |
2726 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 2727 | { |
2728 | rtx insn = XVECEXP (pat, 0, i); | |
2729 | if (INSN_P (insn)) | |
2730 | reset_insn_used_flags (insn); | |
2731 | } | |
764f640f | 2732 | } |
1cd4cfea | 2733 | } |
7cdd84a2 | 2734 | } |
2735 | ||
1e9af25c | 2736 | /* Verify sharing in INSN. */ |
2737 | ||
2738 | static void | |
2739 | verify_insn_sharing (rtx insn) | |
2740 | { | |
2741 | gcc_assert (INSN_P (insn)); | |
2742 | reset_used_flags (PATTERN (insn)); | |
2743 | reset_used_flags (REG_NOTES (insn)); | |
2744 | if (CALL_P (insn)) | |
2745 | reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn)); | |
2746 | } | |
2747 | ||
7cdd84a2 | 2748 | /* Go through all the RTL insn bodies and check that there is no unexpected |
2749 | sharing in between the subexpressions. */ | |
2750 | ||
2751 | DEBUG_FUNCTION void | |
2752 | verify_rtl_sharing (void) | |
2753 | { | |
2754 | rtx p; | |
2755 | ||
2756 | timevar_push (TV_VERIFY_RTL_SHARING); | |
2757 | ||
2758 | reset_all_used_flags (); | |
1cd4cfea | 2759 | |
2760 | for (p = get_insns (); p; p = NEXT_INSN (p)) | |
2761 | if (INSN_P (p)) | |
2762 | { | |
1e9af25c | 2763 | rtx pat = PATTERN (p); |
2764 | if (GET_CODE (pat) != SEQUENCE) | |
2765 | verify_insn_sharing (p); | |
2766 | else | |
2767 | for (int i = 0; i < XVECLEN (pat, 0); i++) | |
11c8949c | 2768 | { |
2769 | rtx insn = XVECEXP (pat, 0, i); | |
2770 | if (INSN_P (insn)) | |
2771 | verify_insn_sharing (insn); | |
2772 | } | |
1cd4cfea | 2773 | } |
4b366dd3 | 2774 | |
7cdd84a2 | 2775 | reset_all_used_flags (); |
2776 | ||
4b366dd3 | 2777 | timevar_pop (TV_VERIFY_RTL_SHARING); |
1cd4cfea | 2778 | } |
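
/* Editor's note: a self-contained sketch -- not GCC code -- of the
   used-bit discipline behind verify_rtx_sharing above: clear every
   node's flag, walk the structure setting flags, and any node reached a
   second time is illegally shared.  */

#include <stdio.h>
#include <stdlib.h>

struct node { int used; struct node *kids[2]; };

static void
verify_sharing (struct node *n)
{
  if (!n)
    return;
  if (n->used)
    {
      /* Same effect as the "invalid rtl sharing" diagnostic above.  */
      fprintf (stderr, "shared node found\n");
      exit (1);
    }
  n->used = 1;
  verify_sharing (n->kids[0]);
  verify_sharing (n->kids[1]);
}

int
main (void)
{
  struct node leaf = { 0, { NULL, NULL } };
  /* Both children point at LEAF: the walk sees it twice and aborts.  */
  struct node root = { 0, { &leaf, &leaf } };
  verify_sharing (&root);
  return 0;
}
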
2779 | ||
2d96a59a | 2780 | /* Go through all the RTL insn bodies and copy any invalid shared structure. |
2781 | Assumes the mark bits are cleared at entry. */ | |
2782 | ||
1cd4cfea | 2783 | void |
2784 | unshare_all_rtl_in_chain (rtx insn) | |
2d96a59a | 2785 | { |
2786 | for (; insn; insn = NEXT_INSN (insn)) | |
9204e736 | 2787 | if (INSN_P (insn)) |
2d96a59a | 2788 | { |
2789 | PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn)); | |
2790 | REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn)); | |
6d2a4bac | 2791 | if (CALL_P (insn)) |
2792 | CALL_INSN_FUNCTION_USAGE (insn) | |
2793 | = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn)); | |
2d96a59a | 2794 | } |
2795 | } | |
2796 | ||
01dc9f0c | 2797 | /* Go through all virtual stack slots of a function and mark them as |
265be050 | 2798 | shared. We never replace the DECL_RTLs themselves with a copy, |
2799 | but expressions mentioned in a DECL_RTL cannot be shared with |
2800 | expressions in the instruction stream. | |
2801 | ||
2802 | Note that reload may convert pseudo registers into memories in-place. | |
2803 | Pseudo registers are always shared, but MEMs never are. Thus if we | |
2804 | reset the used flags on MEMs in the instruction stream, we must set | |
2805 | them again on MEMs that appear in DECL_RTLs. */ | |
2806 | ||
01dc9f0c | 2807 | static void |
265be050 | 2808 | set_used_decls (tree blk) |
01dc9f0c | 2809 | { |
2810 | tree t; | |
2811 | ||
2812 | /* Mark decls. */ | |
1767a056 | 2813 | for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t)) |
0e8e37b2 | 2814 | if (DECL_RTL_SET_P (t)) |
265be050 | 2815 | set_used_flags (DECL_RTL (t)); |
01dc9f0c | 2816 | |
2817 | /* Now process sub-blocks. */ | |
93110716 | 2818 | for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t)) |
265be050 | 2819 | set_used_decls (t); |
01dc9f0c | 2820 | } |
2821 | ||
15bbde2b | 2822 | /* Mark ORIG as in use, and return a copy of it if it was already in use. |
7ba6ce7a | 2823 | Recursively does the same for subexpressions. Uses |
2824 | copy_rtx_if_shared_1 to reduce stack space. */ | |
15bbde2b | 2825 | |
2826 | rtx | |
35cb5232 | 2827 | copy_rtx_if_shared (rtx orig) |
15bbde2b | 2828 | { |
0e0727c4 | 2829 | copy_rtx_if_shared_1 (&orig); |
2830 | return orig; | |
2831 | } | |
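/* Illustrative sketch (hypothetical helper, not part of emit-rtl.c):
   the two-pass used-flag protocol behind copy_rtx_if_shared.  The USED
   bits must be cleared first, or every subexpression would look
   "already seen" and be copied unconditionally.  X is a pattern being
   unshared in isolation.  */

static rtx
unshare_one_pattern (rtx x)
{
  reset_used_flags (x);           /* Pass 1: clear all the USED bits.  */
  return copy_rtx_if_shared (x);  /* Pass 2: copy whatever is seen twice.  */
}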
2832 | ||
7ba6ce7a | 2833 | /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in |
2834 | use. Recursively does the same for subexpressions. */ | |
2835 | ||
0e0727c4 | 2836 | static void |
2837 | copy_rtx_if_shared_1 (rtx *orig1) | |
2838 | { | |
2839 | rtx x; | |
19cb6b50 | 2840 | int i; |
2841 | enum rtx_code code; | |
0e0727c4 | 2842 | rtx *last_ptr; |
19cb6b50 | 2843 | const char *format_ptr; |
15bbde2b | 2844 | int copied = 0; |
0e0727c4 | 2845 | int length; |
2846 | ||
2847 | /* Repeat is used to turn tail-recursion into iteration. */ | |
2848 | repeat: | |
2849 | x = *orig1; | |
15bbde2b | 2850 | |
2851 | if (x == 0) | |
0e0727c4 | 2852 | return; |
15bbde2b | 2853 | |
2854 | code = GET_CODE (x); | |
2855 | ||
2856 | /* These types may be freely shared. */ | |
2857 | ||
2858 | switch (code) | |
2859 | { | |
2860 | case REG: | |
688ff29b | 2861 | case DEBUG_EXPR: |
2862 | case VALUE: | |
0349edce | 2863 | CASE_CONST_ANY: |
15bbde2b | 2864 | case SYMBOL_REF: |
1cd4cfea | 2865 | case LABEL_REF: |
15bbde2b | 2866 | case CODE_LABEL: |
2867 | case PC: | |
2868 | case CC0: | |
e0691b9a | 2869 | case RETURN: |
9cb2517e | 2870 | case SIMPLE_RETURN: |
15bbde2b | 2871 | case SCRATCH: |
a92771b8 | 2872 | /* A SCRATCH must be shared because it represents a distinct value. */ |
0e0727c4 | 2873 | return; |
c09425a0 | 2874 | case CLOBBER: |
b291008a | 2875 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2876 | clobbers or clobbers of hard registers that originated as pseudos. | |
2877 | This is needed to allow safe register renaming. */ | |
2878 | if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER | |
2879 | && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0))) | |
c09425a0 | 2880 | return; |
2881 | break; | |
15bbde2b | 2882 | |
f63d12e3 | 2883 | case CONST: |
3072d30e | 2884 | if (shared_const_p (x)) |
0e0727c4 | 2885 | return; |
f63d12e3 | 2886 | break; |
2887 | ||
9845d120 | 2888 | case DEBUG_INSN: |
15bbde2b | 2889 | case INSN: |
2890 | case JUMP_INSN: | |
2891 | case CALL_INSN: | |
2892 | case NOTE: | |
15bbde2b | 2893 | case BARRIER: |
2894 | /* The chain of insns is not being copied. */ | |
0e0727c4 | 2895 | return; |
15bbde2b | 2896 | |
0dbd1c74 | 2897 | default: |
2898 | break; | |
15bbde2b | 2899 | } |
2900 | ||
2901 | /* This rtx may not be shared. If it has already been seen, | |
2902 | replace it with a copy of itself. */ | |
2903 | ||
7c25cb91 | 2904 | if (RTX_FLAG (x, used)) |
15bbde2b | 2905 | { |
f2d0e9f1 | 2906 | x = shallow_copy_rtx (x); |
15bbde2b | 2907 | copied = 1; |
2908 | } | |
7c25cb91 | 2909 | RTX_FLAG (x, used) = 1; |
15bbde2b | 2910 | |
2911 | /* Now scan the subexpressions recursively. | |
2912 | We can store any replaced subexpressions directly into X | |
2913 | since we know X is not shared! Any vectors in X | |
2914 | must be copied if X was copied. */ | |
2915 | ||
2916 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 2917 | length = GET_RTX_LENGTH (code); |
2918 | last_ptr = NULL; | |
48e1416a | 2919 | |
0e0727c4 | 2920 | for (i = 0; i < length; i++) |
15bbde2b | 2921 | { |
2922 | switch (*format_ptr++) | |
2923 | { | |
2924 | case 'e': | |
0e0727c4 | 2925 | if (last_ptr) |
2926 | copy_rtx_if_shared_1 (last_ptr); | |
2927 | last_ptr = &XEXP (x, i); | |
15bbde2b | 2928 | break; |
2929 | ||
2930 | case 'E': | |
2931 | if (XVEC (x, i) != NULL) | |
2932 | { | |
19cb6b50 | 2933 | int j; |
ffe0869b | 2934 | int len = XVECLEN (x, i); |
48e1416a | 2935 | |
8b332087 | 2936 | /* Copy the vector iff I copied the rtx and the length |
2937 | is nonzero. */ | |
ffe0869b | 2938 | if (copied && len > 0) |
a4070a91 | 2939 | XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem); |
48e1416a | 2940 | |
d632b59a | 2941 | /* Recurse on each element inside the vector. */ |
ffe0869b | 2942 | for (j = 0; j < len; j++) |
0e0727c4 | 2943 | { |
2944 | if (last_ptr) | |
2945 | copy_rtx_if_shared_1 (last_ptr); | |
2946 | last_ptr = &XVECEXP (x, i, j); | |
2947 | } | |
15bbde2b | 2948 | } |
2949 | break; | |
2950 | } | |
2951 | } | |
0e0727c4 | 2952 | *orig1 = x; |
2953 | if (last_ptr) | |
2954 | { | |
2955 | orig1 = last_ptr; | |
2956 | goto repeat; | |
2957 | } | |
2958 | return; | |
15bbde2b | 2959 | } |
2960 | ||
709947e6 | 2961 | /* Set the USED bit in X and its non-shareable subparts to FLAG. */ |
15bbde2b | 2962 | |
709947e6 | 2963 | static void |
2964 | mark_used_flags (rtx x, int flag) | |
15bbde2b | 2965 | { |
19cb6b50 | 2966 | int i, j; |
2967 | enum rtx_code code; | |
2968 | const char *format_ptr; | |
0e0727c4 | 2969 | int length; |
15bbde2b | 2970 | |
0e0727c4 | 2971 | /* Repeat is used to turn tail-recursion into iteration. */ |
2972 | repeat: | |
15bbde2b | 2973 | if (x == 0) |
2974 | return; | |
2975 | ||
2976 | code = GET_CODE (x); | |
2977 | ||
c3418f42 | 2978 | /* These types may be freely shared so we needn't do any resetting |
15bbde2b | 2979 | for them. */ |
2980 | ||
2981 | switch (code) | |
2982 | { | |
2983 | case REG: | |
688ff29b | 2984 | case DEBUG_EXPR: |
2985 | case VALUE: | |
0349edce | 2986 | CASE_CONST_ANY: |
15bbde2b | 2987 | case SYMBOL_REF: |
2988 | case CODE_LABEL: | |
2989 | case PC: | |
2990 | case CC0: | |
e0691b9a | 2991 | case RETURN: |
9cb2517e | 2992 | case SIMPLE_RETURN: |
15bbde2b | 2993 | return; |
2994 | ||
9845d120 | 2995 | case DEBUG_INSN: |
15bbde2b | 2996 | case INSN: |
2997 | case JUMP_INSN: | |
2998 | case CALL_INSN: | |
2999 | case NOTE: | |
3000 | case LABEL_REF: | |
3001 | case BARRIER: | |
3002 | /* The chain of insns is not being copied. */ | |
3003 | return; | |
d823ba47 | 3004 | |
0dbd1c74 | 3005 | default: |
3006 | break; | |
15bbde2b | 3007 | } |
3008 | ||
709947e6 | 3009 | RTX_FLAG (x, used) = flag; |
15bbde2b | 3010 | |
3011 | format_ptr = GET_RTX_FORMAT (code); | |
0e0727c4 | 3012 | length = GET_RTX_LENGTH (code); |
48e1416a | 3013 | |
0e0727c4 | 3014 | for (i = 0; i < length; i++) |
15bbde2b | 3015 | { |
3016 | switch (*format_ptr++) | |
3017 | { | |
3018 | case 'e': | |
0e0727c4 | 3019 | if (i == length - 1) |
3020 | { | |
3021 | x = XEXP (x, i); | |
3022 | goto repeat; | |
3023 | } | |
709947e6 | 3024 | mark_used_flags (XEXP (x, i), flag); |
15bbde2b | 3025 | break; |
3026 | ||
3027 | case 'E': | |
3028 | for (j = 0; j < XVECLEN (x, i); j++) | |
709947e6 | 3029 | mark_used_flags (XVECEXP (x, i, j), flag); |
15bbde2b | 3030 | break; |
3031 | } | |
3032 | } | |
3033 | } | |
1cd4cfea | 3034 | |
709947e6 | 3035 | /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used |
1cd4cfea | 3036 | to look for shared sub-parts. */ |
3037 | ||
3038 | void | |
709947e6 | 3039 | reset_used_flags (rtx x) |
1cd4cfea | 3040 | { |
709947e6 | 3041 | mark_used_flags (x, 0); |
3042 | } | |
1cd4cfea | 3043 | |
709947e6 | 3044 | /* Set all the USED bits in X to allow copy_rtx_if_shared to be used |
3045 | to look for shared sub-parts. */ | |
1cd4cfea | 3046 | |
709947e6 | 3047 | void |
3048 | set_used_flags (rtx x) | |
3049 | { | |
3050 | mark_used_flags (x, 1); | |
1cd4cfea | 3051 | } |
15bbde2b | 3052 | \f |
3053 | /* Copy X if necessary so that it won't be altered by changes in OTHER. | |
3054 | Return X or the rtx for the pseudo reg the value of X was copied into. | |
3055 | OTHER must be valid as a SET_DEST. */ | |
3056 | ||
3057 | rtx | |
35cb5232 | 3058 | make_safe_from (rtx x, rtx other) |
15bbde2b | 3059 | { |
3060 | while (1) | |
3061 | switch (GET_CODE (other)) | |
3062 | { | |
3063 | case SUBREG: | |
3064 | other = SUBREG_REG (other); | |
3065 | break; | |
3066 | case STRICT_LOW_PART: | |
3067 | case SIGN_EXTEND: | |
3068 | case ZERO_EXTEND: | |
3069 | other = XEXP (other, 0); | |
3070 | break; | |
3071 | default: | |
3072 | goto done; | |
3073 | } | |
3074 | done: | |
e16ceb8e | 3075 | if ((MEM_P (other) |
15bbde2b | 3076 | && ! CONSTANT_P (x) |
8ad4c111 | 3077 | && !REG_P (x) |
15bbde2b | 3078 | && GET_CODE (x) != SUBREG) |
8ad4c111 | 3079 | || (REG_P (other) |
15bbde2b | 3080 | && (REGNO (other) < FIRST_PSEUDO_REGISTER |
3081 | || reg_mentioned_p (other, x)))) | |
3082 | { | |
3083 | rtx temp = gen_reg_rtx (GET_MODE (x)); | |
3084 | emit_move_insn (temp, x); | |
3085 | return temp; | |
3086 | } | |
3087 | return x; | |
3088 | } | |
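/* Illustrative sketch (hypothetical caller, not part of emit-rtl.c):
   emit "DEST = VAL" while keeping the old value of X usable afterwards.
   make_safe_from copies X into a fresh pseudo only when the store to
   DEST could alter it, so the common case emits no extra move.  */

static rtx
store_preserving (rtx dest, rtx val, rtx x)
{
  x = make_safe_from (x, dest);  /* Possibly copy X into a new pseudo.  */
  emit_move_insn (dest, val);    /* The store cannot clobber X now.  */
  return x;
}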
3089 | \f | |
3090 | /* Emission of insns (adding them to the doubly-linked list). */ | |
3091 | ||
15bbde2b | 3092 | /* Return the last insn emitted, even if it is in a sequence now pushed. */ |
3093 | ||
3094 | rtx | |
35cb5232 | 3095 | get_last_insn_anywhere (void) |
15bbde2b | 3096 | { |
3097 | struct sequence_stack *stack; | |
06f9d6ef | 3098 | if (get_last_insn ()) |
3099 | return get_last_insn (); | |
0a893c29 | 3100 | for (stack = seq_stack; stack; stack = stack->next) |
15bbde2b | 3101 | if (stack->last != 0) |
3102 | return stack->last; | |
3103 | return 0; | |
3104 | } | |
3105 | ||
70545de4 | 3106 | /* Return the first nonnote insn emitted in the current sequence or current |
3107 | function. This routine looks inside SEQUENCEs. */ | |
3108 | ||
3109 | rtx | |
35cb5232 | 3110 | get_first_nonnote_insn (void) |
70545de4 | 3111 | { |
06f9d6ef | 3112 | rtx insn = get_insns (); |
f86e856e | 3113 | |
3114 | if (insn) | |
3115 | { | |
3116 | if (NOTE_P (insn)) | |
3117 | for (insn = next_insn (insn); | |
3118 | insn && NOTE_P (insn); | |
3119 | insn = next_insn (insn)) | |
3120 | continue; | |
3121 | else | |
3122 | { | |
1c14a50e | 3123 | if (NONJUMP_INSN_P (insn) |
f86e856e | 3124 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
3125 | insn = XVECEXP (PATTERN (insn), 0, 0); | |
3126 | } | |
3127 | } | |
70545de4 | 3128 | |
3129 | return insn; | |
3130 | } | |
3131 | ||
3132 | /* Return the last nonnote insn emitted in the current sequence or current |
3133 | function. This routine looks inside SEQUENCEs. */ | |
3134 | ||
3135 | rtx | |
35cb5232 | 3136 | get_last_nonnote_insn (void) |
70545de4 | 3137 | { |
06f9d6ef | 3138 | rtx insn = get_last_insn (); |
f86e856e | 3139 | |
3140 | if (insn) | |
3141 | { | |
3142 | if (NOTE_P (insn)) | |
3143 | for (insn = previous_insn (insn); | |
3144 | insn && NOTE_P (insn); | |
3145 | insn = previous_insn (insn)) | |
3146 | continue; | |
3147 | else | |
3148 | { | |
1c14a50e | 3149 | if (NONJUMP_INSN_P (insn) |
f86e856e | 3150 | && GET_CODE (PATTERN (insn)) == SEQUENCE) |
3151 | insn = XVECEXP (PATTERN (insn), 0, | |
3152 | XVECLEN (PATTERN (insn), 0) - 1); | |
3153 | } | |
3154 | } | |
70545de4 | 3155 | |
3156 | return insn; | |
3157 | } | |
3158 | ||
9845d120 | 3159 | /* Return the number of actual (non-debug) insns emitted in this |
3160 | function. */ | |
3161 | ||
3162 | int | |
3163 | get_max_insn_count (void) | |
3164 | { | |
3165 | int n = cur_insn_uid; | |
3166 | ||
3167 | /* The table size must be stable across -g, to avoid codegen | |
3168 | differences due to debug insns, and not be affected by | |
3169 | -fmin-insn-uid, to avoid excessive table size and to simplify | |
3170 | debugging of -fcompare-debug failures. */ | |
3171 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
3172 | n -= cur_debug_insn_uid; | |
3173 | else | |
3174 | n -= MIN_NONDEBUG_INSN_UID; | |
3175 | ||
3176 | return n; | |
3177 | } | |
3178 | ||
15bbde2b | 3179 | \f |
3180 | /* Return the next insn. If it is a SEQUENCE, return the first insn | |
3181 | of the sequence. */ | |
3182 | ||
7bac25b3 | 3183 | rtx_insn * |
35cb5232 | 3184 | next_insn (rtx insn) |
15bbde2b | 3185 | { |
ce4469fa | 3186 | if (insn) |
3187 | { | |
3188 | insn = NEXT_INSN (insn); | |
3189 | if (insn && NONJUMP_INSN_P (insn) | |
3190 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
3191 | insn = XVECEXP (PATTERN (insn), 0, 0); | |
3192 | } | |
15bbde2b | 3193 | |
7bac25b3 | 3194 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3195 | } |
3196 | ||
3197 | /* Return the previous insn. If it is a SEQUENCE, return the last insn | |
3198 | of the sequence. */ | |
3199 | ||
7bac25b3 | 3200 | rtx_insn * |
35cb5232 | 3201 | previous_insn (rtx insn) |
15bbde2b | 3202 | { |
ce4469fa | 3203 | if (insn) |
3204 | { | |
3205 | insn = PREV_INSN (insn); | |
3206 | if (insn && NONJUMP_INSN_P (insn) | |
3207 | && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
3208 | insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1); | |
3209 | } | |
15bbde2b | 3210 | |
7bac25b3 | 3211 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3212 | } |
3213 | ||
3214 | /* Return the next insn after INSN that is not a NOTE. This routine does not | |
3215 | look inside SEQUENCEs. */ | |
3216 | ||
7bac25b3 | 3217 | rtx_insn * |
35cb5232 | 3218 | next_nonnote_insn (rtx insn) |
15bbde2b | 3219 | { |
ce4469fa | 3220 | while (insn) |
3221 | { | |
3222 | insn = NEXT_INSN (insn); | |
3223 | if (insn == 0 || !NOTE_P (insn)) | |
3224 | break; | |
3225 | } | |
15bbde2b | 3226 | |
7bac25b3 | 3227 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3228 | } |
3229 | ||
c4d13c5c | 3230 | /* Return the next insn after INSN that is not a NOTE, but stop the |
3231 | search before we enter another basic block. This routine does not | |
3232 | look inside SEQUENCEs. */ | |
3233 | ||
7bac25b3 | 3234 | rtx_insn * |
c4d13c5c | 3235 | next_nonnote_insn_bb (rtx insn) |
3236 | { | |
3237 | while (insn) | |
3238 | { | |
3239 | insn = NEXT_INSN (insn); | |
3240 | if (insn == 0 || !NOTE_P (insn)) | |
3241 | break; | |
3242 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) | |
7bac25b3 | 3243 | return NULL; |
c4d13c5c | 3244 | } |
3245 | ||
7bac25b3 | 3246 | return safe_as_a <rtx_insn *> (insn); |
c4d13c5c | 3247 | } |
3248 | ||
15bbde2b | 3249 | /* Return the previous insn before INSN that is not a NOTE. This routine does |
3250 | not look inside SEQUENCEs. */ | |
3251 | ||
7bac25b3 | 3252 | rtx_insn * |
35cb5232 | 3253 | prev_nonnote_insn (rtx insn) |
15bbde2b | 3254 | { |
ce4469fa | 3255 | while (insn) |
3256 | { | |
3257 | insn = PREV_INSN (insn); | |
3258 | if (insn == 0 || !NOTE_P (insn)) | |
3259 | break; | |
3260 | } | |
15bbde2b | 3261 | |
7bac25b3 | 3262 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3263 | } |
3264 | ||
bcc66782 | 3265 | /* Return the previous insn before INSN that is not a NOTE, but stop |
3266 | the search before we enter another basic block. This routine does | |
3267 | not look inside SEQUENCEs. */ | |
3268 | ||
7bac25b3 | 3269 | rtx_insn * |
bcc66782 | 3270 | prev_nonnote_insn_bb (rtx insn) |
3271 | { | |
3272 | while (insn) | |
3273 | { | |
3274 | insn = PREV_INSN (insn); | |
3275 | if (insn == 0 || !NOTE_P (insn)) | |
3276 | break; | |
3277 | if (NOTE_INSN_BASIC_BLOCK_P (insn)) | |
7bac25b3 | 3278 | return NULL; |
bcc66782 | 3279 | } |
3280 | ||
7bac25b3 | 3281 | return safe_as_a <rtx_insn *> (insn); |
bcc66782 | 3282 | } |
3283 | ||
9845d120 | 3284 | /* Return the next insn after INSN that is not a DEBUG_INSN. This |
3285 | routine does not look inside SEQUENCEs. */ | |
3286 | ||
7bac25b3 | 3287 | rtx_insn * |
9845d120 | 3288 | next_nondebug_insn (rtx insn) |
3289 | { | |
3290 | while (insn) | |
3291 | { | |
3292 | insn = NEXT_INSN (insn); | |
3293 | if (insn == 0 || !DEBUG_INSN_P (insn)) | |
3294 | break; | |
3295 | } | |
3296 | ||
7bac25b3 | 3297 | return safe_as_a <rtx_insn *> (insn); |
9845d120 | 3298 | } |
3299 | ||
3300 | /* Return the previous insn before INSN that is not a DEBUG_INSN. | |
3301 | This routine does not look inside SEQUENCEs. */ | |
3302 | ||
7bac25b3 | 3303 | rtx_insn * |
9845d120 | 3304 | prev_nondebug_insn (rtx insn) |
3305 | { | |
3306 | while (insn) | |
3307 | { | |
3308 | insn = PREV_INSN (insn); | |
3309 | if (insn == 0 || !DEBUG_INSN_P (insn)) | |
3310 | break; | |
3311 | } | |
3312 | ||
7bac25b3 | 3313 | return safe_as_a <rtx_insn *> (insn); |
9845d120 | 3314 | } |
3315 | ||
5b8537a8 | 3316 | /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. |
3317 | This routine does not look inside SEQUENCEs. */ | |
3318 | ||
7bac25b3 | 3319 | rtx_insn * |
5b8537a8 | 3320 | next_nonnote_nondebug_insn (rtx insn) |
3321 | { | |
3322 | while (insn) | |
3323 | { | |
3324 | insn = NEXT_INSN (insn); | |
3325 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) | |
3326 | break; | |
3327 | } | |
3328 | ||
7bac25b3 | 3329 | return safe_as_a <rtx_insn *> (insn); |
5b8537a8 | 3330 | } |
3331 | ||
3332 | /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. | |
3333 | This routine does not look inside SEQUENCEs. */ | |
3334 | ||
7bac25b3 | 3335 | rtx_insn * |
5b8537a8 | 3336 | prev_nonnote_nondebug_insn (rtx insn) |
3337 | { | |
3338 | while (insn) | |
3339 | { | |
3340 | insn = PREV_INSN (insn); | |
3341 | if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) | |
3342 | break; | |
3343 | } | |
3344 | ||
7bac25b3 | 3345 | return safe_as_a <rtx_insn *> (insn); |
5b8537a8 | 3346 | } |
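/* Illustrative sketch (hypothetical walker, not part of emit-rtl.c):
   count the "real" instructions of the current function using the
   skipping variants above, so that notes and debug insns never perturb
   the result.  */

static int
count_real_insns (void)
{
  int n = 0;
  rtx insn;

  for (insn = get_insns (); insn; insn = next_nonnote_nondebug_insn (insn))
    if (INSN_P (insn))
      n++;
  return n;
}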
3347 | ||
15bbde2b | 3348 | /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; |
3349 | or 0, if there is none. This routine does not look inside | |
a92771b8 | 3350 | SEQUENCEs. */ |
15bbde2b | 3351 | |
7bac25b3 | 3352 | rtx_insn * |
35cb5232 | 3353 | next_real_insn (rtx insn) |
15bbde2b | 3354 | { |
ce4469fa | 3355 | while (insn) |
3356 | { | |
3357 | insn = NEXT_INSN (insn); | |
3358 | if (insn == 0 || INSN_P (insn)) | |
3359 | break; | |
3360 | } | |
15bbde2b | 3361 | |
7bac25b3 | 3362 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3363 | } |
3364 | ||
3365 | /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; | |
3366 | or 0, if there is none. This routine does not look inside | |
3367 | SEQUENCEs. */ | |
3368 | ||
7bac25b3 | 3369 | rtx_insn * |
35cb5232 | 3370 | prev_real_insn (rtx insn) |
15bbde2b | 3371 | { |
ce4469fa | 3372 | while (insn) |
3373 | { | |
3374 | insn = PREV_INSN (insn); | |
3375 | if (insn == 0 || INSN_P (insn)) | |
3376 | break; | |
3377 | } | |
15bbde2b | 3378 | |
7bac25b3 | 3379 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3380 | } |
3381 | ||
d5f9786f | 3382 | /* Return the last CALL_INSN in the current list, or 0 if there is none. |
3383 | This routine does not look inside SEQUENCEs. */ | |
3384 | ||
ec22da62 | 3385 | rtx_call_insn * |
35cb5232 | 3386 | last_call_insn (void) |
d5f9786f | 3387 | { |
ec22da62 | 3388 | rtx_insn *insn; |
d5f9786f | 3389 | |
3390 | for (insn = get_last_insn (); | |
6d7dc5b9 | 3391 | insn && !CALL_P (insn); |
d5f9786f | 3392 | insn = PREV_INSN (insn)) |
3393 | ; | |
3394 | ||
ec22da62 | 3395 | return safe_as_a <rtx_call_insn *> (insn); |
d5f9786f | 3396 | } |
3397 | ||
15bbde2b | 3398 | /* Find the next insn after INSN that really does something. This routine |
084950ee | 3399 | does not look inside SEQUENCEs. After reload this also skips over |
3400 | standalone USE and CLOBBER insns. */ |
15bbde2b | 3401 | |
2215ca0d | 3402 | int |
52d07779 | 3403 | active_insn_p (const_rtx insn) |
2215ca0d | 3404 | { |
6d7dc5b9 | 3405 | return (CALL_P (insn) || JUMP_P (insn) |
91f71fa3 | 3406 | || JUMP_TABLE_DATA_P (insn) /* FIXME */ |
6d7dc5b9 | 3407 | || (NONJUMP_INSN_P (insn) |
3a66feab | 3408 | && (! reload_completed |
3409 | || (GET_CODE (PATTERN (insn)) != USE | |
3410 | && GET_CODE (PATTERN (insn)) != CLOBBER)))); | |
2215ca0d | 3411 | } |
3412 | ||
7bac25b3 | 3413 | rtx_insn * |
35cb5232 | 3414 | next_active_insn (rtx insn) |
15bbde2b | 3415 | { |
ce4469fa | 3416 | while (insn) |
3417 | { | |
3418 | insn = NEXT_INSN (insn); | |
3419 | if (insn == 0 || active_insn_p (insn)) | |
3420 | break; | |
3421 | } | |
15bbde2b | 3422 | |
7bac25b3 | 3423 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3424 | } |
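/* Illustrative sketch (hypothetical scan, not part of emit-rtl.c):
   count the active insns that follow INSN; next_active_insn already
   skips notes and, after reload, standalone USEs and CLOBBERs.  */

static int
count_active_insns_after (rtx insn)
{
  int n = 0;

  for (insn = next_active_insn (insn); insn; insn = next_active_insn (insn))
    n++;
  return n;
}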
3425 | ||
3426 | /* Find the last insn before INSN that really does something. This routine | |
084950ee | 3427 | does not look inside SEQUENCEs. After reload this also skips over |
3428 | standalone USE and CLOBBER insns. */ |
15bbde2b | 3429 | |
7bac25b3 | 3430 | rtx_insn * |
35cb5232 | 3431 | prev_active_insn (rtx insn) |
15bbde2b | 3432 | { |
ce4469fa | 3433 | while (insn) |
3434 | { | |
3435 | insn = PREV_INSN (insn); | |
3436 | if (insn == 0 || active_insn_p (insn)) | |
3437 | break; | |
3438 | } | |
15bbde2b | 3439 | |
7bac25b3 | 3440 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3441 | } |
15bbde2b | 3442 | \f |
3443 | #ifdef HAVE_cc0 | |
3444 | /* Return the next insn that uses CC0 after INSN, which is assumed to | |
3445 | set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter | |
3446 | applied to the result of this function should yield INSN). | |
3447 | ||
3448 | Normally, this is simply the next insn. However, if a REG_CC_USER note | |
3449 | is present, it contains the insn that uses CC0. | |
3450 | ||
3451 | Return 0 if we can't find the insn. */ | |
3452 | ||
0be88abd | 3453 | rtx_insn * |
35cb5232 | 3454 | next_cc0_user (rtx insn) |
15bbde2b | 3455 | { |
b572011e | 3456 | rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX); |
15bbde2b | 3457 | |
3458 | if (note) | |
0be88abd | 3459 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3460 | |
3461 | insn = next_nonnote_insn (insn); | |
6d7dc5b9 | 3462 | if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) |
15bbde2b | 3463 | insn = XVECEXP (PATTERN (insn), 0, 0); |
3464 | ||
9204e736 | 3465 | if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn))) |
0be88abd | 3466 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3467 | |
3468 | return 0; | |
3469 | } | |
3470 | ||
3471 | /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER | |
3472 | note, it is the previous insn. */ | |
3473 | ||
0be88abd | 3474 | rtx_insn * |
35cb5232 | 3475 | prev_cc0_setter (rtx insn) |
15bbde2b | 3476 | { |
b572011e | 3477 | rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX); |
15bbde2b | 3478 | |
3479 | if (note) | |
0be88abd | 3480 | return safe_as_a <rtx_insn *> (XEXP (note, 0)); |
15bbde2b | 3481 | |
3482 | insn = prev_nonnote_insn (insn); | |
611234b4 | 3483 | gcc_assert (sets_cc0_p (PATTERN (insn))); |
15bbde2b | 3484 | |
0be88abd | 3485 | return safe_as_a <rtx_insn *> (insn); |
15bbde2b | 3486 | } |
3487 | #endif | |
344dc2fa | 3488 | |
698ff1f0 | 3489 | #ifdef AUTO_INC_DEC |
3490 | /* Find an RTX_AUTOINC class rtx which matches DATA. */ |
3491 | ||
3492 | static int | |
3493 | find_auto_inc (rtx *xp, void *data) | |
3494 | { | |
3495 | rtx x = *xp; | |
225ab426 | 3496 | rtx reg = (rtx) data; |
698ff1f0 | 3497 | |
3498 | if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC) | |
3499 | return 0; | |
3500 | ||
3501 | switch (GET_CODE (x)) | |
3502 | { | |
3503 | case PRE_DEC: | |
3504 | case PRE_INC: | |
3505 | case POST_DEC: | |
3506 | case POST_INC: | |
3507 | case PRE_MODIFY: | |
3508 | case POST_MODIFY: | |
3509 | if (rtx_equal_p (reg, XEXP (x, 0))) | |
3510 | return 1; | |
3511 | break; | |
3512 | ||
3513 | default: | |
3514 | gcc_unreachable (); | |
3515 | } | |
3516 | return -1; | |
3517 | } | |
3518 | #endif | |
3519 | ||
344dc2fa | 3520 | /* Increment the label uses for all labels present in rtx. */ |
3521 | ||
3522 | static void | |
35cb5232 | 3523 | mark_label_nuses (rtx x) |
344dc2fa | 3524 | { |
19cb6b50 | 3525 | enum rtx_code code; |
3526 | int i, j; | |
3527 | const char *fmt; | |
344dc2fa | 3528 | |
3529 | code = GET_CODE (x); | |
a030d4a8 | 3530 | if (code == LABEL_REF && LABEL_P (XEXP (x, 0))) |
344dc2fa | 3531 | LABEL_NUSES (XEXP (x, 0))++; |
3532 | ||
3533 | fmt = GET_RTX_FORMAT (code); | |
3534 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3535 | { | |
3536 | if (fmt[i] == 'e') | |
ff385626 | 3537 | mark_label_nuses (XEXP (x, i)); |
344dc2fa | 3538 | else if (fmt[i] == 'E') |
ff385626 | 3539 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
344dc2fa | 3540 | mark_label_nuses (XVECEXP (x, i, j)); |
3541 | } | |
3542 | } | |
3543 | ||
15bbde2b | 3544 | \f |
3545 | /* Try splitting insns that can be split for better scheduling. | |
3546 | PAT is the pattern that might be split. |
3547 | TRIAL is the insn providing PAT. | |
6ef828f9 | 3548 | LAST is nonzero if we should return the last insn of the sequence produced. |
15bbde2b | 3549 | |
3550 | If this routine succeeds in splitting, it returns the first or last | |
0e69a50a | 3551 | replacement insn depending on the value of LAST. Otherwise, it |
15bbde2b | 3552 | returns TRIAL. If the insn to be returned can be split, it will be. */ |
3553 | ||
bffa1357 | 3554 | rtx_insn * |
35cb5232 | 3555 | try_split (rtx pat, rtx trial, int last) |
15bbde2b | 3556 | { |
bffa1357 | 3557 | rtx_insn *before = PREV_INSN (trial); |
3558 | rtx_insn *after = NEXT_INSN (trial); | |
15bbde2b | 3559 | int has_barrier = 0; |
1e5b92fa | 3560 | rtx note, seq, tem; |
3cd757b1 | 3561 | int probability; |
e13693ec | 3562 | rtx insn_last, insn; |
3563 | int njumps = 0; | |
2e3b0d0f | 3564 | rtx call_insn = NULL_RTX; |
3cd757b1 | 3565 | |
25e880b1 | 3566 | /* We're not good at redistributing frame information. */ |
3567 | if (RTX_FRAME_RELATED_P (trial)) | |
bffa1357 | 3568 | return as_a <rtx_insn *> (trial); |
25e880b1 | 3569 | |
3cd757b1 | 3570 | if (any_condjump_p (trial) |
3571 | && (note = find_reg_note (trial, REG_BR_PROB, 0))) | |
9eb946de | 3572 | split_branch_probability = XINT (note, 0); |
3cd757b1 | 3573 | probability = split_branch_probability; |
3574 | ||
3575 | seq = split_insns (pat, trial); | |
3576 | ||
3577 | split_branch_probability = -1; | |
15bbde2b | 3578 | |
3579 | /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER. | |
3580 | We may need to handle this specially. */ | |
6d7dc5b9 | 3581 | if (after && BARRIER_P (after)) |
15bbde2b | 3582 | { |
3583 | has_barrier = 1; | |
3584 | after = NEXT_INSN (after); | |
3585 | } | |
3586 | ||
e13693ec | 3587 | if (!seq) |
bffa1357 | 3588 | return as_a <rtx_insn *> (trial); |
e13693ec | 3589 | |
3590 | /* Avoid infinite loop if any insn of the result matches | |
3591 | the original pattern. */ | |
3592 | insn_last = seq; | |
3593 | while (1) | |
15bbde2b | 3594 | { |
e13693ec | 3595 | if (INSN_P (insn_last) |
3596 | && rtx_equal_p (PATTERN (insn_last), pat)) | |
bffa1357 | 3597 | return as_a <rtx_insn *> (trial); |
e13693ec | 3598 | if (!NEXT_INSN (insn_last)) |
3599 | break; | |
3600 | insn_last = NEXT_INSN (insn_last); | |
3601 | } | |
d823ba47 | 3602 | |
3072d30e | 3603 | /* We will be adding the new sequence to the function. The splitters |
3604 | may have introduced invalid RTL sharing, so unshare the sequence now. */ | |
3605 | unshare_all_rtl_in_chain (seq); | |
3606 | ||
8f869004 | 3607 | /* Mark labels and copy flags. */ |
e13693ec | 3608 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) |
3609 | { | |
6d7dc5b9 | 3610 | if (JUMP_P (insn)) |
e13693ec | 3611 | { |
8f869004 | 3612 | if (JUMP_P (trial)) |
3613 | CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial); | |
e13693ec | 3614 | mark_jump_label (PATTERN (insn), insn, 0); |
3615 | njumps++; | |
3616 | if (probability != -1 | |
3617 | && any_condjump_p (insn) | |
3618 | && !find_reg_note (insn, REG_BR_PROB, 0)) | |
31d3e01c | 3619 | { |
e13693ec | 3620 | /* We can preserve the REG_BR_PROB notes only if exactly |
3621 | one jump is created, otherwise the machine description | |
3622 | is responsible for this step using the |
3623 | split_branch_probability variable. */ | |
611234b4 | 3624 | gcc_assert (njumps == 1); |
9eb946de | 3625 | add_int_reg_note (insn, REG_BR_PROB, probability); |
31d3e01c | 3626 | } |
e13693ec | 3627 | } |
3628 | } | |
3629 | ||
3630 | /* If we are splitting a CALL_INSN, look for the CALL_INSN | |
b0bd0491 | 3631 | in SEQ and copy any additional information across. */ |
6d7dc5b9 | 3632 | if (CALL_P (trial)) |
e13693ec | 3633 | { |
3634 | for (insn = insn_last; insn ; insn = PREV_INSN (insn)) | |
6d7dc5b9 | 3635 | if (CALL_P (insn)) |
e13693ec | 3636 | { |
b0bd0491 | 3637 | rtx next, *p; |
3638 | ||
2e3b0d0f | 3639 | gcc_assert (call_insn == NULL_RTX); |
3640 | call_insn = insn; | |
3641 | ||
b0bd0491 | 3642 | /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the |
3643 | target may have explicitly specified. */ | |
3644 | p = &CALL_INSN_FUNCTION_USAGE (insn); | |
0bb5a6cd | 3645 | while (*p) |
3646 | p = &XEXP (*p, 1); | |
3647 | *p = CALL_INSN_FUNCTION_USAGE (trial); | |
b0bd0491 | 3648 | |
3649 | /* If the old call was a sibling call, the new one must | |
3650 | be too. */ | |
e13693ec | 3651 | SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); |
b0bd0491 | 3652 | |
3653 | /* If the new call is the last instruction in the sequence, | |
3654 | it will effectively replace the old call in-situ. Otherwise | |
3655 | we must move any following NOTE_INSN_CALL_ARG_LOCATION note | |
3656 | so that it comes immediately after the new call. */ | |
3657 | if (NEXT_INSN (insn)) | |
47e1410d | 3658 | for (next = NEXT_INSN (trial); |
3659 | next && NOTE_P (next); | |
3660 | next = NEXT_INSN (next)) | |
3661 | if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION) | |
b0bd0491 | 3662 | { |
3663 | remove_insn (next); | |
3664 | add_insn_after (next, insn, NULL); | |
47e1410d | 3665 | break; |
b0bd0491 | 3666 | } |
e13693ec | 3667 | } |
3668 | } | |
5262c253 | 3669 | |
e13693ec | 3670 | /* Copy notes, particularly those related to the CFG. */ |
3671 | for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) | |
3672 | { | |
3673 | switch (REG_NOTE_KIND (note)) | |
3674 | { | |
3675 | case REG_EH_REGION: | |
e38def9c | 3676 | copy_reg_eh_region_note_backward (note, insn_last, NULL); |
e13693ec | 3677 | break; |
381eb1e7 | 3678 | |
e13693ec | 3679 | case REG_NORETURN: |
3680 | case REG_SETJMP: | |
4c0315d0 | 3681 | case REG_TM: |
698ff1f0 | 3682 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
381eb1e7 | 3683 | { |
6d7dc5b9 | 3684 | if (CALL_P (insn)) |
a1ddb869 | 3685 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
381eb1e7 | 3686 | } |
e13693ec | 3687 | break; |
5bb27a4b | 3688 | |
e13693ec | 3689 | case REG_NON_LOCAL_GOTO: |
698ff1f0 | 3690 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
31d3e01c | 3691 | { |
6d7dc5b9 | 3692 | if (JUMP_P (insn)) |
a1ddb869 | 3693 | add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
31d3e01c | 3694 | } |
e13693ec | 3695 | break; |
344dc2fa | 3696 | |
698ff1f0 | 3697 | #ifdef AUTO_INC_DEC |
3698 | case REG_INC: | |
3699 | for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) | |
3700 | { | |
3701 | rtx reg = XEXP (note, 0); | |
3702 | if (!FIND_REG_INC_NOTE (insn, reg) | |
3703 | && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0) | |
a1ddb869 | 3704 | add_reg_note (insn, REG_INC, reg); |
698ff1f0 | 3705 | } |
3706 | break; | |
3707 | #endif | |
3708 | ||
dfe00a8f | 3709 | case REG_ARGS_SIZE: |
3710 | fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0))); | |
3711 | break; | |
3712 | ||
2e3b0d0f | 3713 | case REG_CALL_DECL: |
3714 | gcc_assert (call_insn != NULL_RTX); | |
3715 | add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | |
3716 | break; | |
3717 | ||
e13693ec | 3718 | default: |
3719 | break; | |
15bbde2b | 3720 | } |
e13693ec | 3721 | } |
3722 | ||
3723 | /* If there are LABELs inside the split insns, increment the |
3724 | usage count so we don't delete the label. */ | |
19d2fe05 | 3725 | if (INSN_P (trial)) |
e13693ec | 3726 | { |
3727 | insn = insn_last; | |
3728 | while (insn != NULL_RTX) | |
15bbde2b | 3729 | { |
19d2fe05 | 3730 | /* JUMP_P insns have already been "marked" above. */ |
6d7dc5b9 | 3731 | if (NONJUMP_INSN_P (insn)) |
e13693ec | 3732 | mark_label_nuses (PATTERN (insn)); |
15bbde2b | 3733 | |
e13693ec | 3734 | insn = PREV_INSN (insn); |
3735 | } | |
15bbde2b | 3736 | } |
3737 | ||
5169661d | 3738 | tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial)); |
e13693ec | 3739 | |
3740 | delete_insn (trial); | |
3741 | if (has_barrier) | |
3742 | emit_barrier_after (tem); | |
3743 | ||
3744 | /* Recursively call try_split for each new insn created; by the | |
3745 | time control returns here that insn will be fully split, so | |
3746 | set LAST and continue from the insn after the one returned. | |
3747 | We can't use next_active_insn here since AFTER may be a note. | |
3748 | Ignore deleted insns, which can occur if not optimizing. */ |
3749 | for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem)) | |
3750 | if (! INSN_DELETED_P (tem) && INSN_P (tem)) | |
3751 | tem = try_split (PATTERN (tem), tem, 1); | |
3752 | ||
3753 | /* Return either the first or the last insn, depending on which was | |
3754 | requested. */ | |
3755 | return last | |
06f9d6ef | 3756 | ? (after ? PREV_INSN (after) : get_last_insn ()) |
e13693ec | 3757 | : NEXT_INSN (before); |
15bbde2b | 3758 | } |
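/* Illustrative sketch (hypothetical caller, not part of emit-rtl.c):
   the usual calling convention for try_split.  The caller simply keeps
   going with TRIAL when no splitter applied, since try_split returns
   TRIAL unchanged in that case.  */

static rtx_insn *
split_if_possible (rtx_insn *trial)
{
  rtx_insn *last = try_split (PATTERN (trial), trial, 1);

  if (last == trial)
    return trial;  /* No split pattern matched; TRIAL survives.  */
  return last;     /* Last insn of the replacement sequence.  */
}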
3759 | \f | |
3760 | /* Make and return an INSN rtx, initializing all its slots. | |
6a84e367 | 3761 | Store PATTERN in the pattern slot. */ |
15bbde2b | 3762 | |
2c57d586 | 3763 | rtx_insn * |
35cb5232 | 3764 | make_insn_raw (rtx pattern) |
15bbde2b | 3765 | { |
2c57d586 | 3766 | rtx_insn *insn; |
15bbde2b | 3767 | |
2c57d586 | 3768 | insn = as_a <rtx_insn *> (rtx_alloc (INSN)); |
15bbde2b | 3769 | |
575333f9 | 3770 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 3771 | PATTERN (insn) = pattern; |
3772 | INSN_CODE (insn) = -1; | |
fc92fa61 | 3773 | REG_NOTES (insn) = NULL; |
5169661d | 3774 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 3775 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 3776 | |
fe7f701d | 3777 | #ifdef ENABLE_RTL_CHECKING |
3778 | if (insn | |
9204e736 | 3779 | && INSN_P (insn) |
fe7f701d | 3780 | && (returnjump_p (insn) |
3781 | || (GET_CODE (insn) == SET | |
3782 | && SET_DEST (insn) == pc_rtx))) | |
3783 | { | |
c3ceba8e | 3784 | warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n"); |
fe7f701d | 3785 | debug_rtx (insn); |
3786 | } | |
3787 | #endif | |
d823ba47 | 3788 | |
15bbde2b | 3789 | return insn; |
3790 | } | |
3791 | ||
9845d120 | 3792 | /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */ |
3793 | ||
2c57d586 | 3794 | static rtx_insn * |
9845d120 | 3795 | make_debug_insn_raw (rtx pattern) |
3796 | { | |
2c57d586 | 3797 | rtx_debug_insn *insn; |
9845d120 | 3798 | |
2c57d586 | 3799 | insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN)); |
9845d120 | 3800 | INSN_UID (insn) = cur_debug_insn_uid++; |
3801 | if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID) | |
3802 | INSN_UID (insn) = cur_insn_uid++; | |
3803 | ||
3804 | PATTERN (insn) = pattern; | |
3805 | INSN_CODE (insn) = -1; | |
3806 | REG_NOTES (insn) = NULL; | |
5169661d | 3807 | INSN_LOCATION (insn) = curr_insn_location (); |
9845d120 | 3808 | BLOCK_FOR_INSN (insn) = NULL; |
3809 | ||
3810 | return insn; | |
3811 | } | |
3812 | ||
31d3e01c | 3813 | /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ |
15bbde2b | 3814 | |
2c57d586 | 3815 | static rtx_insn * |
35cb5232 | 3816 | make_jump_insn_raw (rtx pattern) |
15bbde2b | 3817 | { |
2c57d586 | 3818 | rtx_jump_insn *insn; |
15bbde2b | 3819 | |
2c57d586 | 3820 | insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN)); |
fc92fa61 | 3821 | INSN_UID (insn) = cur_insn_uid++; |
15bbde2b | 3822 | |
3823 | PATTERN (insn) = pattern; | |
3824 | INSN_CODE (insn) = -1; | |
fc92fa61 | 3825 | REG_NOTES (insn) = NULL; |
3826 | JUMP_LABEL (insn) = NULL; | |
5169661d | 3827 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 3828 | BLOCK_FOR_INSN (insn) = NULL; |
15bbde2b | 3829 | |
3830 | return insn; | |
3831 | } | |
6e911104 | 3832 | |
31d3e01c | 3833 | /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */ |
6e911104 | 3834 | |
2c57d586 | 3835 | static rtx_insn * |
35cb5232 | 3836 | make_call_insn_raw (rtx pattern) |
6e911104 | 3837 | { |
2c57d586 | 3838 | rtx_call_insn *insn; |
6e911104 | 3839 | |
2c57d586 | 3840 | insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN)); |
6e911104 | 3841 | INSN_UID (insn) = cur_insn_uid++; |
3842 | ||
3843 | PATTERN (insn) = pattern; | |
3844 | INSN_CODE (insn) = -1; | |
6e911104 | 3845 | REG_NOTES (insn) = NULL; |
3846 | CALL_INSN_FUNCTION_USAGE (insn) = NULL; | |
5169661d | 3847 | INSN_LOCATION (insn) = curr_insn_location (); |
ab87d1bc | 3848 | BLOCK_FOR_INSN (insn) = NULL; |
6e911104 | 3849 | |
3850 | return insn; | |
3851 | } | |
35f3420b | 3852 | |
3853 | /* Like `make_insn_raw' but make a NOTE instead of an insn. */ | |
3854 | ||
cef3d8ad | 3855 | static rtx_note * |
35f3420b | 3856 | make_note_raw (enum insn_note subtype) |
3857 | { | |
3858 | /* Some notes are never created this way at all. These notes are | |
3859 | only created by patching out insns. */ | |
3860 | gcc_assert (subtype != NOTE_INSN_DELETED_LABEL | |
3861 | && subtype != NOTE_INSN_DELETED_DEBUG_LABEL); | |
3862 | ||
cef3d8ad | 3863 | rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE)); |
35f3420b | 3864 | INSN_UID (note) = cur_insn_uid++; |
3865 | NOTE_KIND (note) = subtype; | |
3866 | BLOCK_FOR_INSN (note) = NULL; | |
3867 | memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note))); | |
3868 | return note; | |
3869 | } | |
15bbde2b | 3870 | \f |
35f3420b | 3871 | /* Add INSN to the end of the doubly-linked list, between PREV and NEXT. |
3872 | INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects, | |
3873 | but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */ | |
3874 | ||
3875 | static inline void | |
3e75e92b | 3876 | link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) |
35f3420b | 3877 | { |
4a57a2e8 | 3878 | SET_PREV_INSN (insn) = prev; |
3879 | SET_NEXT_INSN (insn) = next; | |
35f3420b | 3880 | if (prev != NULL) |
3881 | { | |
4a57a2e8 | 3882 | SET_NEXT_INSN (prev) = insn; |
35f3420b | 3883 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
3884 | { | |
3885 | rtx sequence = PATTERN (prev); | |
4a57a2e8 | 3886 | SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn; |
35f3420b | 3887 | } |
3888 | } | |
3889 | if (next != NULL) | |
3890 | { | |
4a57a2e8 | 3891 | SET_PREV_INSN (next) = insn; |
35f3420b | 3892 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
4a57a2e8 | 3893 | SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn; |
35f3420b | 3894 | } |
34f5b9ac | 3895 | |
3896 | if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
3897 | { | |
3898 | rtx sequence = PATTERN (insn); | |
4a57a2e8 | 3899 | SET_PREV_INSN (XVECEXP (sequence, 0, 0)) = prev; |
3900 | SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next; | |
34f5b9ac | 3901 | } |
35f3420b | 3902 | } |
3903 | ||
15bbde2b | 3904 | /* Add INSN to the end of the doubly-linked list. |
3905 | INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */ | |
3906 | ||
3907 | void | |
3e75e92b | 3908 | add_insn (rtx_insn *insn) |
15bbde2b | 3909 | { |
3e75e92b | 3910 | rtx_insn *prev = get_last_insn (); |
35f3420b | 3911 | link_insn_into_chain (insn, prev, NULL); |
06f9d6ef | 3912 | if (NULL == get_insns ()) |
3913 | set_first_insn (insn); | |
06f9d6ef | 3914 | set_last_insn (insn); |
15bbde2b | 3915 | } |
3916 | ||
35f3420b | 3917 | /* Add INSN into the doubly-linked list after insn AFTER. */ |
15bbde2b | 3918 | |
35f3420b | 3919 | static void |
3e75e92b | 3920 | add_insn_after_nobb (rtx_insn *insn, rtx_insn *after) |
15bbde2b | 3921 | { |
3e75e92b | 3922 | rtx_insn *next = NEXT_INSN (after); |
15bbde2b | 3923 | |
611234b4 | 3924 | gcc_assert (!optimize || !INSN_DELETED_P (after)); |
f65c10c0 | 3925 | |
35f3420b | 3926 | link_insn_into_chain (insn, after, next); |
15bbde2b | 3927 | |
35f3420b | 3928 | if (next == NULL) |
15bbde2b | 3929 | { |
35f3420b | 3930 | if (get_last_insn () == after) |
3931 | set_last_insn (insn); | |
3932 | else | |
3933 | { | |
3934 | struct sequence_stack *stack = seq_stack; | |
3935 | /* Scan all pending sequences too. */ | |
3936 | for (; stack; stack = stack->next) | |
3937 | if (after == stack->last) | |
3938 | { | |
3939 | stack->last = insn; | |
3940 | break; | |
3941 | } | |
3942 | } | |
15bbde2b | 3943 | } |
35f3420b | 3944 | } |
3945 | ||
3946 | /* Add INSN into the doubly-linked list before insn BEFORE. */ | |
3947 | ||
3948 | static void | |
3e75e92b | 3949 | add_insn_before_nobb (rtx_insn *insn, rtx_insn *before) |
35f3420b | 3950 | { |
3e75e92b | 3951 | rtx_insn *prev = PREV_INSN (before); |
35f3420b | 3952 | |
3953 | gcc_assert (!optimize || !INSN_DELETED_P (before)); | |
3954 | ||
3955 | link_insn_into_chain (insn, prev, before); | |
3956 | ||
3957 | if (prev == NULL) | |
15bbde2b | 3958 | { |
35f3420b | 3959 | if (get_insns () == before) |
3960 | set_first_insn (insn); | |
3961 | else | |
3962 | { | |
3963 | struct sequence_stack *stack = seq_stack; | |
3964 | /* Scan all pending sequences too. */ | |
3965 | for (; stack; stack = stack->next) | |
3966 | if (before == stack->first) | |
3967 | { | |
3968 | stack->first = insn; | |
3969 | break; | |
3970 | } | |
312de84d | 3971 | |
35f3420b | 3972 | gcc_assert (stack); |
3973 | } | |
15bbde2b | 3974 | } |
35f3420b | 3975 | } |
3976 | ||
3977 | /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN. | |
3978 | If BB is NULL, an attempt is made to infer the bb from AFTER. |
3979 | ||
3980 | This and the next function should be the only functions called | |
3981 | to insert an insn once delay slots have been filled since only | |
3982 | they know how to update a SEQUENCE. */ | |
15bbde2b | 3983 | |
35f3420b | 3984 | void |
3e75e92b | 3985 | add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb) |
35f3420b | 3986 | { |
26bb3cb2 | 3987 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
3e75e92b | 3988 | rtx_insn *after = as_a <rtx_insn *> (uncast_after); |
35f3420b | 3989 | add_insn_after_nobb (insn, after); |
6d7dc5b9 | 3990 | if (!BARRIER_P (after) |
3991 | && !BARRIER_P (insn) | |
9dda7915 | 3992 | && (bb = BLOCK_FOR_INSN (after))) |
3993 | { | |
3994 | set_block_for_insn (insn, bb); | |
308f9b79 | 3995 | if (INSN_P (insn)) |
3072d30e | 3996 | df_insn_rescan (insn); |
9dda7915 | 3997 | /* Should not happen as the first insn in the BB is always |
3fb1e43b | 3998 | either a NOTE or a LABEL. */ |
5496dbfc | 3999 | if (BB_END (bb) == after |
9dda7915 | 4000 | /* Avoid clobbering of structure when creating new BB. */ |
6d7dc5b9 | 4001 | && !BARRIER_P (insn) |
ad4583d9 | 4002 | && !NOTE_INSN_BASIC_BLOCK_P (insn)) |
26bb3cb2 | 4003 | BB_END (bb) = insn; |
9dda7915 | 4004 | } |
15bbde2b | 4005 | } |
4006 | ||
35f3420b | 4007 | /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN. |
4008 | If BB is NULL, an attempt is made to infer the bb from BEFORE. |
4009 | ||
4010 | This and the previous function should be the only functions called | |
4011 | to insert an insn once delay slots have been filled since only | |
4012 | they know how to update a SEQUENCE. */ | |
312de84d | 4013 | |
4014 | void | |
3e75e92b | 4015 | add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb) |
312de84d | 4016 | { |
3e75e92b | 4017 | rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); |
4018 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); | |
35f3420b | 4019 | add_insn_before_nobb (insn, before); |
312de84d | 4020 | |
48e1416a | 4021 | if (!bb |
3072d30e | 4022 | && !BARRIER_P (before) |
4023 | && !BARRIER_P (insn)) | |
4024 | bb = BLOCK_FOR_INSN (before); | |
4025 | ||
4026 | if (bb) | |
9dda7915 | 4027 | { |
4028 | set_block_for_insn (insn, bb); | |
308f9b79 | 4029 | if (INSN_P (insn)) |
3072d30e | 4030 | df_insn_rescan (insn); |
611234b4 | 4031 | /* Should not happen as the first insn in the BB is always either a |
ba821eb1 | 4032 | NOTE or a LABEL. */ |
611234b4 | 4033 | gcc_assert (BB_HEAD (bb) != insn |
4034 | /* Avoid clobbering of structure when creating new BB. */ | |
4035 | || BARRIER_P (insn) | |
ad4583d9 | 4036 | || NOTE_INSN_BASIC_BLOCK_P (insn)); |
9dda7915 | 4037 | } |
312de84d | 4038 | } |
4039 | ||
3072d30e | 4040 | /* Replace INSN with a deleted instruction note. */ |
4041 | ||
fc3d1695 | 4042 | void |
4043 | set_insn_deleted (rtx insn) | |
3072d30e | 4044 | { |
91f71fa3 | 4045 | if (INSN_P (insn)) |
b983ea33 | 4046 | df_insn_delete (insn); |
3072d30e | 4047 | PUT_CODE (insn, NOTE); |
4048 | NOTE_KIND (insn) = NOTE_INSN_DELETED; | |
4049 | } | |
4050 | ||
4051 | ||
93ff53d3 | 4052 | /* Unlink INSN from the insn chain. |
4053 | ||
4054 | This function knows how to handle sequences. | |
4055 | ||
4056 | This function does not invalidate data flow information associated with | |
4057 | INSN (i.e. does not call df_insn_delete). That makes this function | |
4058 | usable for only disconnecting an insn from the chain, and re-emitting |
4059 | it elsewhere later. |
4060 | ||
4061 | To later insert INSN elsewhere in the insn chain via add_insn and | |
4062 | similar functions, PREV_INSN and NEXT_INSN must be nullified by | |
4063 | the caller. Nullifying them here breaks many insn chain walks. | |
4064 | ||
4065 | To really delete an insn and related DF information, use delete_insn. */ | |
4066 | ||
7ddcf2bf | 4067 | void |
35cb5232 | 4068 | remove_insn (rtx insn) |
7ddcf2bf | 4069 | { |
26bb3cb2 | 4070 | rtx_insn *next = NEXT_INSN (insn); |
4071 | rtx_insn *prev = PREV_INSN (insn); | |
e4bf866d | 4072 | basic_block bb; |
4073 | ||
7ddcf2bf | 4074 | if (prev) |
4075 | { | |
4a57a2e8 | 4076 | SET_NEXT_INSN (prev) = next; |
6d7dc5b9 | 4077 | if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE) |
7ddcf2bf | 4078 | { |
4079 | rtx sequence = PATTERN (prev); | |
4a57a2e8 | 4080 | SET_NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next; |
7ddcf2bf | 4081 | } |
4082 | } | |
06f9d6ef | 4083 | else if (get_insns () == insn) |
4084 | { | |
c8f0c143 | 4085 | if (next) |
4a57a2e8 | 4086 | SET_PREV_INSN (next) = NULL; |
06f9d6ef | 4087 | set_first_insn (next); |
4088 | } | |
7ddcf2bf | 4089 | else |
4090 | { | |
0a893c29 | 4091 | struct sequence_stack *stack = seq_stack; |
7ddcf2bf | 4092 | /* Scan all pending sequences too. */ |
4093 | for (; stack; stack = stack->next) | |
4094 | if (insn == stack->first) | |
4095 | { | |
4096 | stack->first = next; | |
4097 | break; | |
4098 | } | |
4099 | ||
611234b4 | 4100 | gcc_assert (stack); |
7ddcf2bf | 4101 | } |
4102 | ||
4103 | if (next) | |
4104 | { | |
4a57a2e8 | 4105 | SET_PREV_INSN (next) = prev; |
6d7dc5b9 | 4106 | if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE) |
4a57a2e8 | 4107 | SET_PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev; |
7ddcf2bf | 4108 | } |
06f9d6ef | 4109 | else if (get_last_insn () == insn) |
4110 | set_last_insn (prev); | |
7ddcf2bf | 4111 | else |
4112 | { | |
0a893c29 | 4113 | struct sequence_stack *stack = seq_stack; |
7ddcf2bf | 4114 | /* Scan all pending sequences too. */ |
4115 | for (; stack; stack = stack->next) | |
4116 | if (insn == stack->last) | |
4117 | { | |
4118 | stack->last = prev; | |
4119 | break; | |
4120 | } | |
4121 | ||
611234b4 | 4122 | gcc_assert (stack); |
7ddcf2bf | 4123 | } |
b983ea33 | 4124 | |
b983ea33 | 4125 | /* Fix up basic block boundaries, if necessary. */ |
6d7dc5b9 | 4126 | if (!BARRIER_P (insn) |
e4bf866d | 4127 | && (bb = BLOCK_FOR_INSN (insn))) |
4128 | { | |
5496dbfc | 4129 | if (BB_HEAD (bb) == insn) |
e4bf866d | 4130 | { |
f4aee538 | 4131 | /* Never ever delete the basic block note without deleting the |
4132 | whole basic block. */ |
611234b4 | 4133 | gcc_assert (!NOTE_P (insn)); |
26bb3cb2 | 4134 | BB_HEAD (bb) = next; |
e4bf866d | 4135 | } |
5496dbfc | 4136 | if (BB_END (bb) == insn) |
26bb3cb2 | 4137 | BB_END (bb) = prev; |
e4bf866d | 4138 | } |
7ddcf2bf | 4139 | } |
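/* Illustrative sketch (hypothetical helper, not part of emit-rtl.c):
   the unlink-and-re-emit pattern described in the comment above
   remove_insn.  As required there, PREV_INSN and NEXT_INSN are
   nullified before the insn is added back into the chain.  */

static void
move_insn_after_1 (rtx_insn *insn, rtx_insn *after)
{
  remove_insn (insn);  /* Unlink only; DF information is kept.  */
  SET_PREV_INSN (insn) = NULL;
  SET_NEXT_INSN (insn) = NULL;
  add_insn_after (insn, after, NULL);
}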
4140 | ||
d5f9786f | 4141 | /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */ |
4142 | ||
4143 | void | |
35cb5232 | 4144 | add_function_usage_to (rtx call_insn, rtx call_fusage) |
d5f9786f | 4145 | { |
611234b4 | 4146 | gcc_assert (call_insn && CALL_P (call_insn)); |
d5f9786f | 4147 | |
4148 | /* Put the register usage information on the CALL. If there is already | |
4149 | some usage information, put ours at the end. */ | |
4150 | if (CALL_INSN_FUNCTION_USAGE (call_insn)) | |
4151 | { | |
4152 | rtx link; | |
4153 | ||
4154 | for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0; | |
4155 | link = XEXP (link, 1)) | |
4156 | ; | |
4157 | ||
4158 | XEXP (link, 1) = call_fusage; | |
4159 | } | |
4160 | else | |
4161 | CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage; | |
4162 | } | |
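/* Illustrative sketch (hypothetical caller, not part of emit-rtl.c):
   record that CALL_INSN implicitly uses REG by appending a one-element
   USE list to its CALL_INSN_FUNCTION_USAGE.  */

static void
note_call_uses_reg (rtx call_insn, rtx reg)
{
  rtx fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                  gen_rtx_USE (VOIDmode, reg),
                                  NULL_RTX);
  add_function_usage_to (call_insn, fusage);
}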
4163 | ||
15bbde2b | 4164 | /* Delete all insns made since FROM. |
4165 | FROM becomes the new last instruction. */ | |
4166 | ||
4167 | void | |
35cb5232 | 4168 | delete_insns_since (rtx from) |
15bbde2b | 4169 | { |
4170 | if (from == 0) | |
06f9d6ef | 4171 | set_first_insn (0); |
15bbde2b | 4172 | else |
4a57a2e8 | 4173 | SET_NEXT_INSN (from) = 0; |
06f9d6ef | 4174 | set_last_insn (from); |
15bbde2b | 4175 | } |
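/* Illustrative sketch (hypothetical caller, not part of emit-rtl.c):
   the classic rollback idiom built on delete_insns_since -- remember
   the last insn, emit tentatively, and discard everything on failure.  */

static bool
emit_or_discard (rtx pat)
{
  rtx last = get_last_insn ();
  rtx insn = emit_insn (pat);

  if (recog_memoized (insn) < 0)  /* The target cannot match it.  */
    {
      delete_insns_since (last);
      return false;
    }
  return true;
}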
4176 | ||
34e2ddcd | 4177 | /* This function is deprecated, please use sequences instead. |
4178 | ||
4179 | Move a consecutive bunch of insns to a different place in the chain. | |
15bbde2b | 4180 | The insns to be moved are those between FROM and TO. |
4181 | They are moved to a new position after the insn AFTER. | |
4182 | AFTER must not be FROM or TO or any insn in between. | |
4183 | ||
4184 | This function does not know about SEQUENCEs and hence should not be | |
4185 | called after delay-slot filling has been done. */ | |
4186 | ||
4187 | void | |
35cb5232 | 4188 | reorder_insns_nobb (rtx from, rtx to, rtx after) |
15bbde2b | 4189 | { |
7f6ca11f | 4190 | #ifdef ENABLE_CHECKING |
4191 | rtx x; | |
4192 | for (x = from; x != to; x = NEXT_INSN (x)) | |
4193 | gcc_assert (after != x); | |
4194 | gcc_assert (after != to); | |
4195 | #endif | |
4196 | ||
15bbde2b | 4197 | /* Splice this bunch out of where it is now. */ |
4198 | if (PREV_INSN (from)) | |
4a57a2e8 | 4199 | SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to); |
15bbde2b | 4200 | if (NEXT_INSN (to)) |
4a57a2e8 | 4201 | SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from); |
06f9d6ef | 4202 | if (get_last_insn () == to) |
4203 | set_last_insn (PREV_INSN (from)); | |
4204 | if (get_insns () == from) | |
4205 | set_first_insn (NEXT_INSN (to)); | |
15bbde2b | 4206 | |
4207 | /* Make the new neighbors point to it and it to them. */ | |
4208 | if (NEXT_INSN (after)) | |
4a57a2e8 | 4209 | SET_PREV_INSN (NEXT_INSN (after)) = to; |
15bbde2b | 4210 | |
4a57a2e8 | 4211 | SET_NEXT_INSN (to) = NEXT_INSN (after); |
4212 | SET_PREV_INSN (from) = after; | |
4213 | SET_NEXT_INSN (after) = from; | |
9af5ce0c | 4214 | if (after == get_last_insn ()) |
06f9d6ef | 4215 | set_last_insn (to); |
15bbde2b | 4216 | } |
4217 | ||
9dda7915 | 4218 | /* Same as function above, but take care to update BB boundaries. */ |
4219 | void | |
4a3fb716 | 4220 | reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after) |
9dda7915 | 4221 | { |
4a3fb716 | 4222 | rtx_insn *prev = PREV_INSN (from); |
9dda7915 | 4223 | basic_block bb, bb2; |
4224 | ||
4225 | reorder_insns_nobb (from, to, after); | |
4226 | ||
6d7dc5b9 | 4227 | if (!BARRIER_P (after) |
9dda7915 | 4228 | && (bb = BLOCK_FOR_INSN (after))) |
4229 | { | |
4230 | rtx x; | |
3072d30e | 4231 | df_set_bb_dirty (bb); |
d4c5e26d | 4232 | |
6d7dc5b9 | 4233 | if (!BARRIER_P (from) |
9dda7915 | 4234 | && (bb2 = BLOCK_FOR_INSN (from))) |
4235 | { | |
5496dbfc | 4236 | if (BB_END (bb2) == to) |
26bb3cb2 | 4237 | BB_END (bb2) = prev; |
3072d30e | 4238 | df_set_bb_dirty (bb2); |
9dda7915 | 4239 | } |
4240 | ||
5496dbfc | 4241 | if (BB_END (bb) == after) |
26bb3cb2 | 4242 | BB_END (bb) = to; |
9dda7915 | 4243 | |
4244 | for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x)) | |
7097dd0c | 4245 | if (!BARRIER_P (x)) |
a2bdd643 | 4246 | df_insn_change_bb (x, bb); |
9dda7915 | 4247 | } |
4248 | } | |
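/* Illustrative sketch (hypothetical motion, not part of emit-rtl.c):
   move a single insn so that it executes right after AFTER, letting
   reorder_insns keep the basic-block boundaries consistent.  */

static void
move_single_insn_after (rtx_insn *insn, rtx_insn *after)
{
  reorder_insns (insn, insn, after);  /* FROM == TO: a one-insn range.  */
}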
4249 | ||
15bbde2b | 4250 | \f |
31d3e01c | 4251 | /* Emit insn(s) of given code and pattern |
4252 | at a specified place within the doubly-linked list. | |
15bbde2b | 4253 | |
31d3e01c | 4254 | All of the emit_foo global entry points accept an object |
4255 | X which is either an insn list or a PATTERN of a single | |
4256 | instruction. | |
15bbde2b | 4257 | |
31d3e01c | 4258 | There are thus a few canonical ways to generate code and |
4259 | emit it at a specific place in the instruction stream. For | |
4260 | example, consider the instruction named SPOT and the fact that | |
4261 | we would like to emit some instructions before SPOT. We might | |
4262 | do it like this: | |
15bbde2b | 4263 | |
31d3e01c | 4264 | start_sequence (); |
4265 | ... emit the new instructions ... | |
4266 | insns_head = get_insns (); | |
4267 | end_sequence (); | |
15bbde2b | 4268 | |
31d3e01c | 4269 | emit_insn_before (insns_head, SPOT); |
15bbde2b | 4270 | |
31d3e01c | 4271 | It used to be common to generate SEQUENCE rtl instead, but that |
4272 | is a relic of the past which no longer occurs. The reason is that | |
4273 | SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE |
4274 | generated would almost certainly die right after it was created. */ | |
15bbde2b | 4275 | |
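/* Illustrative sketch of the idiom described above (hypothetical
   caller; SPOT stands for some existing instruction, TEMP and SRC for
   operands of the code being generated).  */

static void
emit_move_before_spot (rtx temp, rtx src, rtx spot)
{
  rtx insns_head;

  start_sequence ();
  emit_move_insn (temp, src);  /* ... emit the new instructions ...  */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}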
722334ea | 4276 | static rtx_insn * |
5f7c5ddd | 4277 | emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb, |
2c57d586 | 4278 | rtx_insn *(*make_raw) (rtx)) |
15bbde2b | 4279 | { |
2c57d586 | 4280 | rtx_insn *insn; |
15bbde2b | 4281 | |
611234b4 | 4282 | gcc_assert (before); |
31d3e01c | 4283 | |
4284 | if (x == NULL_RTX) | |
722334ea | 4285 | return safe_as_a <rtx_insn *> (last); |
31d3e01c | 4286 | |
4287 | switch (GET_CODE (x)) | |
15bbde2b | 4288 | { |
9845d120 | 4289 | case DEBUG_INSN: |
31d3e01c | 4290 | case INSN: |
4291 | case JUMP_INSN: | |
4292 | case CALL_INSN: | |
4293 | case CODE_LABEL: | |
4294 | case BARRIER: | |
4295 | case NOTE: | |
2c57d586 | 4296 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 4297 | while (insn) |
4298 | { | |
2c57d586 | 4299 | rtx_insn *next = NEXT_INSN (insn); |
3072d30e | 4300 | add_insn_before (insn, before, bb); |
31d3e01c | 4301 | last = insn; |
4302 | insn = next; | |
4303 | } | |
4304 | break; | |
4305 | ||
4306 | #ifdef ENABLE_RTL_CHECKING | |
4307 | case SEQUENCE: | |
611234b4 | 4308 | gcc_unreachable (); |
31d3e01c | 4309 | break; |
4310 | #endif | |
4311 | ||
4312 | default: | |
5f7c5ddd | 4313 | last = (*make_raw) (x); |
3072d30e | 4314 | add_insn_before (last, before, bb); |
31d3e01c | 4315 | break; |
15bbde2b | 4316 | } |
4317 | ||
722334ea | 4318 | return safe_as_a <rtx_insn *> (last); |
15bbde2b | 4319 | } |
4320 | ||
5f7c5ddd | 4321 | /* Make X be output before the instruction BEFORE. */ |
4322 | ||
722334ea | 4323 | rtx_insn * |
5f7c5ddd | 4324 | emit_insn_before_noloc (rtx x, rtx before, basic_block bb) |
4325 | { | |
4326 | return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw); | |
4327 | } | |
4328 | ||
31d3e01c | 4329 | /* Make an instruction with body X and code JUMP_INSN |
15bbde2b | 4330 | and output it before the instruction BEFORE. */ |
4331 | ||
722334ea | 4332 | rtx_insn * |
0891f67c | 4333 | emit_jump_insn_before_noloc (rtx x, rtx before) |
15bbde2b | 4334 | { |
5f7c5ddd | 4335 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4336 | make_jump_insn_raw); | |
15bbde2b | 4337 | } |
4338 | ||
31d3e01c | 4339 | /* Make an instruction with body X and code CALL_INSN |
cd0fe062 | 4340 | and output it before the instruction BEFORE. */ |
4341 | ||
722334ea | 4342 | rtx_insn * |
0891f67c | 4343 | emit_call_insn_before_noloc (rtx x, rtx before) |
cd0fe062 | 4344 | { |
5f7c5ddd | 4345 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4346 | make_call_insn_raw); | |
cd0fe062 | 4347 | } |
4348 | ||
9845d120 | 4349 | /* Make an instruction with body X and code DEBUG_INSN |
4350 | and output it before the instruction BEFORE. */ | |
4351 | ||
722334ea | 4352 | rtx_insn * |
9845d120 | 4353 | emit_debug_insn_before_noloc (rtx x, rtx before) |
4354 | { | |
5f7c5ddd | 4355 | return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, |
4356 | make_debug_insn_raw); | |
9845d120 | 4357 | } |
4358 | ||
15bbde2b | 4359 | /* Make an insn of code BARRIER |
71caadc0 | 4360 | and output it before the insn BEFORE. */ |
15bbde2b | 4361 | |
722334ea | 4362 | rtx_barrier * |
35cb5232 | 4363 | emit_barrier_before (rtx before) |
15bbde2b | 4364 | { |
722334ea | 4365 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4366 | |
4367 | INSN_UID (insn) = cur_insn_uid++; | |
4368 | ||
3072d30e | 4369 | add_insn_before (insn, before, NULL); |
15bbde2b | 4370 | return insn; |
4371 | } | |
4372 | ||
71caadc0 | 4373 | /* Emit the label LABEL before the insn BEFORE. */ |
4374 | ||
722334ea | 4375 | rtx_insn * |
35cb5232 | 4376 | emit_label_before (rtx label, rtx before) |
71caadc0 | 4377 | { |
596ef494 | 4378 | gcc_checking_assert (INSN_UID (label) == 0); |
4379 | INSN_UID (label) = cur_insn_uid++; | |
4380 | add_insn_before (label, before, NULL); | |
722334ea | 4381 | return as_a <rtx_insn *> (label); |
71caadc0 | 4382 | } |
15bbde2b | 4383 | \f |
31d3e01c | 4384 | /* Helper for emit_insn_after, handles lists of instructions |
4385 | efficiently. */ | |
15bbde2b | 4386 | |
31d3e01c | 4387 | static rtx |
26bb3cb2 | 4388 | emit_insn_after_1 (rtx_insn *first, rtx after, basic_block bb) |
15bbde2b | 4389 | { |
26bb3cb2 | 4390 | rtx_insn *last; |
4391 | rtx_insn *after_after; | |
3072d30e | 4392 | if (!bb && !BARRIER_P (after)) |
4393 | bb = BLOCK_FOR_INSN (after); | |
15bbde2b | 4394 | |
3072d30e | 4395 | if (bb) |
15bbde2b | 4396 | { |
3072d30e | 4397 | df_set_bb_dirty (bb); |
31d3e01c | 4398 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
6d7dc5b9 | 4399 | if (!BARRIER_P (last)) |
3072d30e | 4400 | { |
4401 | set_block_for_insn (last, bb); | |
4402 | df_insn_rescan (last); | |
4403 | } | |
6d7dc5b9 | 4404 | if (!BARRIER_P (last)) |
3072d30e | 4405 | { |
4406 | set_block_for_insn (last, bb); | |
4407 | df_insn_rescan (last); | |
4408 | } | |
5496dbfc | 4409 | if (BB_END (bb) == after) |
26bb3cb2 | 4410 | BB_END (bb) = last; |
15bbde2b | 4411 | } |
4412 | else | |
31d3e01c | 4413 | for (last = first; NEXT_INSN (last); last = NEXT_INSN (last)) |
4414 | continue; | |
4415 | ||
4416 | after_after = NEXT_INSN (after); | |
4417 | ||
4a57a2e8 | 4418 | SET_NEXT_INSN (after) = first; |
4419 | SET_PREV_INSN (first) = after; | |
4420 | SET_NEXT_INSN (last) = after_after; | |
31d3e01c | 4421 | if (after_after) |
4a57a2e8 | 4422 | SET_PREV_INSN (after_after) = last; |
31d3e01c | 4423 | |
9af5ce0c | 4424 | if (after == get_last_insn ()) |
06f9d6ef | 4425 | set_last_insn (last); |
e1ab7874 | 4426 | |
31d3e01c | 4427 | return last; |
4428 | } | |
4429 | ||
722334ea | 4430 | static rtx_insn * |
5f7c5ddd | 4431 | emit_pattern_after_noloc (rtx x, rtx after, basic_block bb, |
2c57d586 | 4432 | rtx_insn *(*make_raw)(rtx)) |
31d3e01c | 4433 | { |
4434 | rtx last = after; | |
4435 | ||
611234b4 | 4436 | gcc_assert (after); |
31d3e01c | 4437 | |
4438 | if (x == NULL_RTX) | |
722334ea | 4439 | return safe_as_a <rtx_insn *> (last); |
31d3e01c | 4440 | |
4441 | switch (GET_CODE (x)) | |
15bbde2b | 4442 | { |
9845d120 | 4443 | case DEBUG_INSN: |
31d3e01c | 4444 | case INSN: |
4445 | case JUMP_INSN: | |
4446 | case CALL_INSN: | |
4447 | case CODE_LABEL: | |
4448 | case BARRIER: | |
4449 | case NOTE: | |
26bb3cb2 | 4450 | last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb); |
31d3e01c | 4451 | break; |
4452 | ||
4453 | #ifdef ENABLE_RTL_CHECKING | |
4454 | case SEQUENCE: | |
611234b4 | 4455 | gcc_unreachable (); |
31d3e01c | 4456 | break; |
4457 | #endif | |
4458 | ||
4459 | default: | |
5f7c5ddd | 4460 | last = (*make_raw) (x); |
3072d30e | 4461 | add_insn_after (last, after, bb); |
31d3e01c | 4462 | break; |
15bbde2b | 4463 | } |
4464 | ||
722334ea | 4465 | return safe_as_a <rtx_insn *> (last); |
15bbde2b | 4466 | } |
4467 | ||
5f7c5ddd | 4468 | /* Make X be output after the insn AFTER and set its basic block to BB.
4469 | If BB is NULL, an attempt is made to infer the BB from AFTER. */ | |
4470 | ||
722334ea | 4471 | rtx_insn * |
5f7c5ddd | 4472 | emit_insn_after_noloc (rtx x, rtx after, basic_block bb) |
4473 | { | |
4474 | return emit_pattern_after_noloc (x, after, bb, make_insn_raw); | |
4475 | } | |
4476 | ||
1bea98fb | 4477 | |
31d3e01c | 4478 | /* Make an insn of code JUMP_INSN with body X |
15bbde2b | 4479 | and output it after the insn AFTER. */ |
4480 | ||
722334ea | 4481 | rtx_insn * |
0891f67c | 4482 | emit_jump_insn_after_noloc (rtx x, rtx after) |
15bbde2b | 4483 | { |
5f7c5ddd | 4484 | return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw); |
31d3e01c | 4485 | } |
4486 | ||
4487 | /* Make an instruction with body X and code CALL_INSN | |
4488 | and output it after the instruction AFTER. */ | |
4489 | ||
722334ea | 4490 | rtx_insn * |
0891f67c | 4491 | emit_call_insn_after_noloc (rtx x, rtx after) |
31d3e01c | 4492 | { |
5f7c5ddd | 4493 | return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); |
15bbde2b | 4494 | } |
4495 | ||
9845d120 | 4496 | /* Make an instruction with body X and code DEBUG_INSN
4497 | and output it after the instruction AFTER. */ | |
4498 | ||
722334ea | 4499 | rtx_insn * |
9845d120 | 4500 | emit_debug_insn_after_noloc (rtx x, rtx after) |
4501 | { | |
5f7c5ddd | 4502 | return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); |
9845d120 | 4503 | } |
4504 | ||
15bbde2b | 4505 | /* Make an insn of code BARRIER |
4506 | and output it after the insn AFTER. */ | |
4507 | ||
722334ea | 4508 | rtx_barrier * |
35cb5232 | 4509 | emit_barrier_after (rtx after) |
15bbde2b | 4510 | { |
722334ea | 4511 | rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 4512 | |
4513 | INSN_UID (insn) = cur_insn_uid++; | |
4514 | ||
3072d30e | 4515 | add_insn_after (insn, after, NULL); |
15bbde2b | 4516 | return insn; |
4517 | } | |
4518 | ||
4519 | /* Emit the label LABEL after the insn AFTER. */ | |
4520 | ||
722334ea | 4521 | rtx_insn * |
35cb5232 | 4522 | emit_label_after (rtx label, rtx after) |
15bbde2b | 4523 | { |
596ef494 | 4524 | gcc_checking_assert (INSN_UID (label) == 0); |
4525 | INSN_UID (label) = cur_insn_uid++; | |
4526 | add_insn_after (label, after, NULL); | |
722334ea | 4527 | return as_a <rtx_insn *> (label); |
15bbde2b | 4528 | } |
35f3420b | 4529 | \f |
4530 | /* Notes require a bit of special handling: Some notes need to have their | |
4531 | BLOCK_FOR_INSN set, others should never have it set, and some should | |
4532 | have it set or clear depending on the context. */ | |
4533 | ||
4534 | /* Return true iff a note of kind SUBTYPE should be emitted with routines | |
4535 | that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the | |
4536 | caller is asked to emit a note before BB_HEAD, or after BB_END. */ | |
4537 | ||
4538 | static bool | |
4539 | note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p) | |
4540 | { | |
4541 | switch (subtype) | |
4542 | { | |
4543 | /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */ | |
4544 | case NOTE_INSN_SWITCH_TEXT_SECTIONS: | |
4545 | return true; | |
4546 | ||
4547 | /* Notes for var tracking and EH region markers can appear between or | |
4548 | inside basic blocks. If the caller is emitting on the basic block | |
4549 | boundary, do not set BLOCK_FOR_INSN on the new note. */ | |
4550 | case NOTE_INSN_VAR_LOCATION: | |
4551 | case NOTE_INSN_CALL_ARG_LOCATION: | |
4552 | case NOTE_INSN_EH_REGION_BEG: | |
4553 | case NOTE_INSN_EH_REGION_END: | |
4554 | return on_bb_boundary_p; | |
4555 | ||
4556 | /* Otherwise, BLOCK_FOR_INSN must be set. */ | |
4557 | default: | |
4558 | return false; | |
4559 | } | |
4560 | } | |
15bbde2b | 4561 | |
4562 | /* Emit a note of subtype SUBTYPE after the insn AFTER. */ | |
4563 | ||
cef3d8ad | 4564 | rtx_note * |
3e75e92b | 4565 | emit_note_after (enum insn_note subtype, rtx uncast_after) |
15bbde2b | 4566 | { |
3e75e92b | 4567 | rtx_insn *after = as_a <rtx_insn *> (uncast_after); |
cef3d8ad | 4568 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4569 | basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after); |
4570 | bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after); | |
4571 | ||
4572 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4573 | add_insn_after_nobb (note, after); | |
4574 | else | |
4575 | add_insn_after (note, after, bb); | |
4576 | return note; | |
4577 | } | |
4578 | ||
4579 | /* Emit a note of subtype SUBTYPE before the insn BEFORE. */ | |
4580 | ||
cef3d8ad | 4581 | rtx_note * |
3e75e92b | 4582 | emit_note_before (enum insn_note subtype, rtx uncast_before) |
35f3420b | 4583 | { |
3e75e92b | 4584 | rtx_insn *before = as_a <rtx_insn *> (uncast_before); |
cef3d8ad | 4585 | rtx_note *note = make_note_raw (subtype); |
35f3420b | 4586 | basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before); |
4587 | bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before); | |
4588 | ||
4589 | if (note_outside_basic_block_p (subtype, on_bb_boundary_p)) | |
4590 | add_insn_before_nobb (note, before); | |
4591 | else | |
4592 | add_insn_before (note, before, bb); | |
15bbde2b | 4593 | return note; |
4594 | } | |
15bbde2b | 4595 | \f |
ede4ebcb | 4596 | /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. |
4597 | MAKE_RAW indicates how to turn PATTERN into a real insn. */ | |
4598 | ||
722334ea | 4599 | static rtx_insn * |
ede4ebcb | 4600 | emit_pattern_after_setloc (rtx pattern, rtx after, int loc, |
2c57d586 | 4601 | rtx_insn *(*make_raw) (rtx)) |
d321a68b | 4602 | { |
ede4ebcb | 4603 | rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
d321a68b | 4604 | |
0891f67c | 4605 | if (pattern == NULL_RTX || !loc) |
722334ea | 4606 | return safe_as_a <rtx_insn *> (last); |
ca154f3f | 4607 | |
31d3e01c | 4608 | after = NEXT_INSN (after); |
4609 | while (1) | |
4610 | { | |
5169661d | 4611 | if (active_insn_p (after) && !INSN_LOCATION (after)) |
4612 | INSN_LOCATION (after) = loc; | |
31d3e01c | 4613 | if (after == last) |
4614 | break; | |
4615 | after = NEXT_INSN (after); | |
4616 | } | |
722334ea | 4617 | return safe_as_a <rtx_insn *> (last); |
d321a68b | 4618 | } |
4619 | ||
ede4ebcb | 4620 | /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN |
4621 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after | |
4622 | any DEBUG_INSNs. */ | |
4623 | ||
722334ea | 4624 | static rtx_insn * |
ede4ebcb | 4625 | emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns, |
2c57d586 | 4626 | rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4627 | { |
9845d120 | 4628 | rtx prev = after; |
4629 | ||
ede4ebcb | 4630 | if (skip_debug_insns) |
4631 | while (DEBUG_INSN_P (prev)) | |
4632 | prev = PREV_INSN (prev); | |
9845d120 | 4633 | |
4634 | if (INSN_P (prev)) | |
5169661d | 4635 | return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev), |
ede4ebcb | 4636 | make_raw); |
0891f67c | 4637 | else |
ede4ebcb | 4638 | return emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
0891f67c | 4639 | } |
4640 | ||
5169661d | 4641 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4642 | rtx_insn * |
ede4ebcb | 4643 | emit_insn_after_setloc (rtx pattern, rtx after, int loc) |
d321a68b | 4644 | { |
ede4ebcb | 4645 | return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); |
4646 | } | |
31d3e01c | 4647 | |
5169661d | 4648 | /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4649 | rtx_insn * |
ede4ebcb | 4650 | emit_insn_after (rtx pattern, rtx after) |
4651 | { | |
4652 | return emit_pattern_after (pattern, after, true, make_insn_raw); | |
4653 | } | |
ca154f3f | 4654 | |
5169661d | 4655 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4656 | rtx_insn * |
ede4ebcb | 4657 | emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc) |
4658 | { | |
4659 | return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw); | |
d321a68b | 4660 | } |
4661 | ||
5169661d | 4662 | /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4663 | rtx_insn * |
0891f67c | 4664 | emit_jump_insn_after (rtx pattern, rtx after) |
4665 | { | |
ede4ebcb | 4666 | return emit_pattern_after (pattern, after, true, make_jump_insn_raw); |
0891f67c | 4667 | } |
4668 | ||
5169661d | 4669 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4670 | rtx_insn * |
35cb5232 | 4671 | emit_call_insn_after_setloc (rtx pattern, rtx after, int loc) |
d321a68b | 4672 | { |
ede4ebcb | 4673 | return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); |
d321a68b | 4674 | } |
4675 | ||
5169661d | 4676 | /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4677 | rtx_insn * |
0891f67c | 4678 | emit_call_insn_after (rtx pattern, rtx after) |
4679 | { | |
ede4ebcb | 4680 | return emit_pattern_after (pattern, after, true, make_call_insn_raw); |
0891f67c | 4681 | } |
4682 | ||
5169661d | 4683 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4684 | rtx_insn * |
9845d120 | 4685 | emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc) |
4686 | { | |
ede4ebcb | 4687 | return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); |
9845d120 | 4688 | } |
4689 | ||
5169661d | 4690 | /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
722334ea | 4691 | rtx_insn * |
9845d120 | 4692 | emit_debug_insn_after (rtx pattern, rtx after) |
4693 | { | |
ede4ebcb | 4694 | return emit_pattern_after (pattern, after, false, make_debug_insn_raw); |
9845d120 | 4695 | } |
4696 | ||
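/* Illustrative sketch, not part of emit-rtl.c: how a caller might choose
   between the plain and _setloc variants above.  The helper name and its
   parameters are hypothetical.  */

static rtx_insn *
example_emit_after_with_loc (rtx pat, rtx after, int loc)
{
  if (loc)
    /* Force LOC as the INSN_LOCATION of the newly emitted active insns.  */
    return emit_insn_after_setloc (pat, after, loc);
  else
    /* Inherit the location from AFTER (skipping any DEBUG_INSNs).  */
    return emit_insn_after (pat, after);
}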
ede4ebcb | 4697 | /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. |
4698 | MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP | |
4699 | indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, | |
4700 | CALL_INSN, etc. */ | |
4701 | ||
722334ea | 4702 | static rtx_insn * |
ede4ebcb | 4703 | emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp, |
2c57d586 | 4704 | rtx_insn *(*make_raw) (rtx)) |
d321a68b | 4705 | { |
4706 | rtx first = PREV_INSN (before); | |
ede4ebcb | 4707 | rtx last = emit_pattern_before_noloc (pattern, before, |
4708 | insnp ? before : NULL_RTX, | |
4709 | NULL, make_raw); | |
0891f67c | 4710 | |
4711 | if (pattern == NULL_RTX || !loc) | |
722334ea | 4712 | return safe_as_a <rtx_insn *> (last); |
0891f67c | 4713 | |
4486418e | 4714 | if (!first) |
4715 | first = get_insns (); | |
4716 | else | |
4717 | first = NEXT_INSN (first); | |
0891f67c | 4718 | while (1) |
4719 | { | |
5169661d | 4720 | if (active_insn_p (first) && !INSN_LOCATION (first)) |
4721 | INSN_LOCATION (first) = loc; | |
0891f67c | 4722 | if (first == last) |
4723 | break; | |
4724 | first = NEXT_INSN (first); | |
4725 | } | |
722334ea | 4726 | return safe_as_a <rtx_insn *> (last); |
0891f67c | 4727 | } |
4728 | ||
ede4ebcb | 4729 | /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN |
4730 | into a real insn. SKIP_DEBUG_INSNS indicates whether to insert | |
4731 | before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an | |
4732 | INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ | |
4733 | ||
722334ea | 4734 | static rtx_insn * |
ede4ebcb | 4735 | emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns, |
2c57d586 | 4736 | bool insnp, rtx_insn *(*make_raw) (rtx)) |
0891f67c | 4737 | { |
9845d120 | 4738 | rtx next = before; |
4739 | ||
ede4ebcb | 4740 | if (skip_debug_insns) |
4741 | while (DEBUG_INSN_P (next)) | |
4742 | next = PREV_INSN (next); | |
9845d120 | 4743 | |
4744 | if (INSN_P (next)) | |
5169661d | 4745 | return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), |
ede4ebcb | 4746 | insnp, make_raw); |
0891f67c | 4747 | else |
ede4ebcb | 4748 | return emit_pattern_before_noloc (pattern, before, |
4749 | insnp ? before : NULL_RTX, | |
4750 | NULL, make_raw); | |
0891f67c | 4751 | } |
4752 | ||
5169661d | 4753 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4754 | rtx_insn * |
ede4ebcb | 4755 | emit_insn_before_setloc (rtx pattern, rtx before, int loc) |
0891f67c | 4756 | { |
ede4ebcb | 4757 | return emit_pattern_before_setloc (pattern, before, loc, true, |
4758 | make_insn_raw); | |
4759 | } | |
0891f67c | 4760 | |
5169661d | 4761 | /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
722334ea | 4762 | rtx_insn * |
ede4ebcb | 4763 | emit_insn_before (rtx pattern, rtx before) |
4764 | { | |
4765 | return emit_pattern_before (pattern, before, true, true, make_insn_raw); | |
4766 | } | |
0891f67c | 4767 | |
5169661d | 4768 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4769 | rtx_insn * |
ede4ebcb | 4770 | emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc) |
4771 | { | |
4772 | return emit_pattern_before_setloc (pattern, before, loc, false, | |
4773 | make_jump_insn_raw); | |
0891f67c | 4774 | } |
4775 | ||
5169661d | 4776 | /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
722334ea | 4777 | rtx_insn * |
0891f67c | 4778 | emit_jump_insn_before (rtx pattern, rtx before) |
4779 | { | |
ede4ebcb | 4780 | return emit_pattern_before (pattern, before, true, false, |
4781 | make_jump_insn_raw); | |
0891f67c | 4782 | } |
4783 | ||
5169661d | 4784 | /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4785 | rtx_insn * |
0891f67c | 4786 | emit_call_insn_before_setloc (rtx pattern, rtx before, int loc) |
4787 | { | |
ede4ebcb | 4788 | return emit_pattern_before_setloc (pattern, before, loc, false, |
4789 | make_call_insn_raw); | |
d321a68b | 4790 | } |
0891f67c | 4791 | |
ede4ebcb | 4792 | /* Like emit_call_insn_before_noloc, |
5169661d | 4793 | but set insn_location according to BEFORE. */ |
722334ea | 4794 | rtx_insn * |
0891f67c | 4795 | emit_call_insn_before (rtx pattern, rtx before) |
4796 | { | |
ede4ebcb | 4797 | return emit_pattern_before (pattern, before, true, false, |
4798 | make_call_insn_raw); | |
0891f67c | 4799 | } |
9845d120 | 4800 | |
5169661d | 4801 | /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
722334ea | 4802 | rtx_insn * |
9845d120 | 4803 | emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc) |
4804 | { | |
ede4ebcb | 4805 | return emit_pattern_before_setloc (pattern, before, loc, false, |
4806 | make_debug_insn_raw); | |
9845d120 | 4807 | } |
4808 | ||
ede4ebcb | 4809 | /* Like emit_debug_insn_before_noloc, |
5169661d | 4810 | but set insn_location according to BEFORE. */ |
722334ea | 4811 | rtx_insn * |
9845d120 | 4812 | emit_debug_insn_before (rtx pattern, rtx before) |
4813 | { | |
ede4ebcb | 4814 | return emit_pattern_before (pattern, before, false, false, |
4815 | make_debug_insn_raw); | |
9845d120 | 4816 | } |
d321a68b | 4817 | \f |
31d3e01c | 4818 | /* Take X and emit it at the end of the doubly-linked |
4819 | INSN list. | |
15bbde2b | 4820 | |
4821 | Returns the last insn emitted. */ | |
4822 | ||
722334ea | 4823 | rtx_insn * |
35cb5232 | 4824 | emit_insn (rtx x) |
15bbde2b | 4825 | { |
722334ea | 4826 | rtx_insn *last = get_last_insn (); |
4827 | rtx_insn *insn; | |
15bbde2b | 4828 | |
31d3e01c | 4829 | if (x == NULL_RTX) |
4830 | return last; | |
15bbde2b | 4831 | |
31d3e01c | 4832 | switch (GET_CODE (x)) |
4833 | { | |
9845d120 | 4834 | case DEBUG_INSN: |
31d3e01c | 4835 | case INSN: |
4836 | case JUMP_INSN: | |
4837 | case CALL_INSN: | |
4838 | case CODE_LABEL: | |
4839 | case BARRIER: | |
4840 | case NOTE: | |
722334ea | 4841 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 4842 | while (insn) |
15bbde2b | 4843 | { |
722334ea | 4844 | rtx_insn *next = NEXT_INSN (insn); |
15bbde2b | 4845 | add_insn (insn); |
31d3e01c | 4846 | last = insn; |
4847 | insn = next; | |
15bbde2b | 4848 | } |
31d3e01c | 4849 | break; |
15bbde2b | 4850 | |
31d3e01c | 4851 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 4852 | case JUMP_TABLE_DATA: |
31d3e01c | 4853 | case SEQUENCE: |
611234b4 | 4854 | gcc_unreachable (); |
31d3e01c | 4855 | break; |
4856 | #endif | |
15bbde2b | 4857 | |
31d3e01c | 4858 | default: |
4859 | last = make_insn_raw (x); | |
4860 | add_insn (last); | |
4861 | break; | |
15bbde2b | 4862 | } |
4863 | ||
4864 | return last; | |
4865 | } | |
4866 | ||
9845d120 | 4867 | /* Make an insn of code DEBUG_INSN with pattern X |
4868 | and add it to the end of the doubly-linked list. */ | |
4869 | ||
722334ea | 4870 | rtx_insn * |
9845d120 | 4871 | emit_debug_insn (rtx x) |
4872 | { | |
722334ea | 4873 | rtx_insn *last = get_last_insn (); |
4874 | rtx_insn *insn; | |
9845d120 | 4875 | |
4876 | if (x == NULL_RTX) | |
4877 | return last; | |
4878 | ||
4879 | switch (GET_CODE (x)) | |
4880 | { | |
4881 | case DEBUG_INSN: | |
4882 | case INSN: | |
4883 | case JUMP_INSN: | |
4884 | case CALL_INSN: | |
4885 | case CODE_LABEL: | |
4886 | case BARRIER: | |
4887 | case NOTE: | |
722334ea | 4888 | insn = as_a <rtx_insn *> (x); |
9845d120 | 4889 | while (insn) |
4890 | { | |
722334ea | 4891 | rtx_insn *next = NEXT_INSN (insn); |
9845d120 | 4892 | add_insn (insn); |
4893 | last = insn; | |
4894 | insn = next; | |
4895 | } | |
4896 | break; | |
4897 | ||
4898 | #ifdef ENABLE_RTL_CHECKING | |
91f71fa3 | 4899 | case JUMP_TABLE_DATA: |
9845d120 | 4900 | case SEQUENCE: |
4901 | gcc_unreachable (); | |
4902 | break; | |
4903 | #endif | |
4904 | ||
4905 | default: | |
4906 | last = make_debug_insn_raw (x); | |
4907 | add_insn (last); | |
4908 | break; | |
4909 | } | |
4910 | ||
4911 | return last; | |
4912 | } | |
4913 | ||
31d3e01c | 4914 | /* Make an insn of code JUMP_INSN with pattern X |
4915 | and add it to the end of the doubly-linked list. */ | |
15bbde2b | 4916 | |
722334ea | 4917 | rtx_insn * |
35cb5232 | 4918 | emit_jump_insn (rtx x) |
15bbde2b | 4919 | { |
722334ea | 4920 | rtx_insn *last = NULL; |
4921 | rtx_insn *insn; | |
15bbde2b | 4922 | |
31d3e01c | 4923 | switch (GET_CODE (x)) |
15bbde2b | 4924 | { |
9845d120 | 4925 | case DEBUG_INSN: |
31d3e01c | 4926 | case INSN: |
4927 | case JUMP_INSN: | |
4928 | case CALL_INSN: | |
4929 | case CODE_LABEL: | |
4930 | case BARRIER: | |
4931 | case NOTE: | |
722334ea | 4932 | insn = as_a <rtx_insn *> (x); |
31d3e01c | 4933 | while (insn) |
4934 | { | |
722334ea | 4935 | rtx_insn *next = NEXT_INSN (insn); |
31d3e01c | 4936 | add_insn (insn); |
4937 | last = insn; | |
4938 | insn = next; | |
4939 | } | |
4940 | break; | |
b36b07d8 | 4941 | |
31d3e01c | 4942 | #ifdef ENABLE_RTL_CHECKING |
91f71fa3 | 4943 | case JUMP_TABLE_DATA: |
31d3e01c | 4944 | case SEQUENCE: |
611234b4 | 4945 | gcc_unreachable (); |
31d3e01c | 4946 | break; |
4947 | #endif | |
b36b07d8 | 4948 | |
31d3e01c | 4949 | default: |
4950 | last = make_jump_insn_raw (x); | |
4951 | add_insn (last); | |
4952 | break; | |
9dda7915 | 4953 | } |
b36b07d8 | 4954 | |
4955 | return last; | |
4956 | } | |
4957 | ||
31d3e01c | 4958 | /* Make an insn of code CALL_INSN with pattern X |
15bbde2b | 4959 | and add it to the end of the doubly-linked list. */ |
4960 | ||
722334ea | 4961 | rtx_insn * |
35cb5232 | 4962 | emit_call_insn (rtx x) |
15bbde2b | 4963 | { |
722334ea | 4964 | rtx_insn *insn; |
31d3e01c | 4965 | |
4966 | switch (GET_CODE (x)) | |
15bbde2b | 4967 | { |
9845d120 | 4968 | case DEBUG_INSN: |
31d3e01c | 4969 | case INSN: |
4970 | case JUMP_INSN: | |
4971 | case CALL_INSN: | |
4972 | case CODE_LABEL: | |
4973 | case BARRIER: | |
4974 | case NOTE: | |
4975 | insn = emit_insn (x); | |
4976 | break; | |
15bbde2b | 4977 | |
31d3e01c | 4978 | #ifdef ENABLE_RTL_CHECKING |
4979 | case SEQUENCE: | |
91f71fa3 | 4980 | case JUMP_TABLE_DATA: |
611234b4 | 4981 | gcc_unreachable (); |
31d3e01c | 4982 | break; |
4983 | #endif | |
15bbde2b | 4984 | |
31d3e01c | 4985 | default: |
4986 | insn = make_call_insn_raw (x); | |
15bbde2b | 4987 | add_insn (insn); |
31d3e01c | 4988 | break; |
15bbde2b | 4989 | } |
31d3e01c | 4990 | |
4991 | return insn; | |
15bbde2b | 4992 | } |
4993 | ||
4994 | /* Add the label LABEL to the end of the doubly-linked list. */ | |
4995 | ||
722334ea | 4996 | rtx_insn * |
35cb5232 | 4997 | emit_label (rtx label) |
15bbde2b | 4998 | { |
596ef494 | 4999 | gcc_checking_assert (INSN_UID (label) == 0); |
5000 | INSN_UID (label) = cur_insn_uid++; | |
3e75e92b | 5001 | add_insn (as_a <rtx_insn *> (label)); |
722334ea | 5002 | return as_a <rtx_insn *> (label); |
15bbde2b | 5003 | } |
5004 | ||
91f71fa3 | 5005 | /* Make an insn of code JUMP_TABLE_DATA |
5006 | and add it to the end of the doubly-linked list. */ | |
5007 | ||
e41badc0 | 5008 | rtx_jump_table_data * |
91f71fa3 | 5009 | emit_jump_table_data (rtx table) |
5010 | { | |
e41badc0 | 5011 | rtx_jump_table_data *jump_table_data = |
5012 | as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA)); | |
91f71fa3 | 5013 | INSN_UID (jump_table_data) = cur_insn_uid++; |
5014 | PATTERN (jump_table_data) = table; | |
5015 | BLOCK_FOR_INSN (jump_table_data) = NULL; | |
5016 | add_insn (jump_table_data); | |
5017 | return jump_table_data; | |
5018 | } | |
5019 | ||
15bbde2b | 5020 | /* Make an insn of code BARRIER |
5021 | and add it to the end of the doubly-linked list. */ | |
5022 | ||
722334ea | 5023 | rtx_barrier * |
35cb5232 | 5024 | emit_barrier (void) |
15bbde2b | 5025 | { |
722334ea | 5026 | rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
15bbde2b | 5027 | INSN_UID (barrier) = cur_insn_uid++; |
5028 | add_insn (barrier); | |
5029 | return barrier; | |
5030 | } | |
5031 | ||
2f57e3d9 | 5032 | /* Emit a copy of note ORIG. */ |
35cb5232 | 5033 | |
cef3d8ad | 5034 | rtx_note * |
5035 | emit_note_copy (rtx_note *orig) | |
2f57e3d9 | 5036 | { |
35f3420b | 5037 | enum insn_note kind = (enum insn_note) NOTE_KIND (orig); |
cef3d8ad | 5038 | rtx_note *note = make_note_raw (kind); |
2f57e3d9 | 5039 | NOTE_DATA (note) = NOTE_DATA (orig); |
2f57e3d9 | 5040 | add_insn (note); |
31b97e8f | 5041 | return note; |
15bbde2b | 5042 | } |
5043 | ||
31b97e8f | 5044 | /* Make an insn of code NOTE or type NOTE_NO |
5045 | and add it to the end of the doubly-linked list. */ | |
15bbde2b | 5046 | |
cef3d8ad | 5047 | rtx_note * |
ad4583d9 | 5048 | emit_note (enum insn_note kind) |
15bbde2b | 5049 | { |
cef3d8ad | 5050 | rtx_note *note = make_note_raw (kind); |
15bbde2b | 5051 | add_insn (note); |
5052 | return note; | |
5053 | } | |
5054 | ||
18b42941 | 5055 | /* Emit a clobber of lvalue X. */ |
5056 | ||
722334ea | 5057 | rtx_insn * |
18b42941 | 5058 | emit_clobber (rtx x) |
5059 | { | |
5060 | /* CONCATs should not appear in the insn stream. */ | |
5061 | if (GET_CODE (x) == CONCAT) | |
5062 | { | |
5063 | emit_clobber (XEXP (x, 0)); | |
5064 | return emit_clobber (XEXP (x, 1)); | |
5065 | } | |
5066 | return emit_insn (gen_rtx_CLOBBER (VOIDmode, x)); | |
5067 | } | |
5068 | ||
5069 | /* Return a sequence of insns to clobber lvalue X. */ | |
5070 | ||
722334ea | 5071 | rtx_insn * |
18b42941 | 5072 | gen_clobber (rtx x) |
5073 | { | |
722334ea | 5074 | rtx_insn *seq; |
18b42941 | 5075 | |
5076 | start_sequence (); | |
5077 | emit_clobber (x); | |
5078 | seq = get_insns (); | |
5079 | end_sequence (); | |
5080 | return seq; | |
5081 | } | |
5082 | ||
5083 | /* Emit a use of rvalue X. */ | |
5084 | ||
722334ea | 5085 | rtx_insn * |
18b42941 | 5086 | emit_use (rtx x) |
5087 | { | |
5088 | /* CONCATs should not appear in the insn stream. */ | |
5089 | if (GET_CODE (x) == CONCAT) | |
5090 | { | |
5091 | emit_use (XEXP (x, 0)); | |
5092 | return emit_use (XEXP (x, 1)); | |
5093 | } | |
5094 | return emit_insn (gen_rtx_USE (VOIDmode, x)); | |
5095 | } | |
5096 | ||
5097 | /* Return a sequence of insns to use rvalue X. */ | |
5098 | ||
722334ea | 5099 | rtx_insn * |
18b42941 | 5100 | gen_use (rtx x) |
5101 | { | |
722334ea | 5102 | rtx_insn *seq; |
18b42941 | 5103 | |
5104 | start_sequence (); | |
5105 | emit_use (x); | |
5106 | seq = get_insns (); | |
5107 | end_sequence (); | |
5108 | return seq; | |
5109 | } | |
5110 | ||
3a286419 | 5111 | /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction. |
5112 | Return the set in INSN that such notes describe, or NULL if the notes | |
5113 | have no meaning for INSN. */ | |
5114 | ||
5115 | rtx | |
5116 | set_for_reg_notes (rtx insn) | |
5117 | { | |
5118 | rtx pat, reg; | |
5119 | ||
5120 | if (!INSN_P (insn)) | |
5121 | return NULL_RTX; | |
5122 | ||
5123 | pat = PATTERN (insn); | |
5124 | if (GET_CODE (pat) == PARALLEL) | |
5125 | { | |
5126 | /* We do not use single_set because that ignores SETs of unused | |
5127 | registers. REG_EQUAL and REG_EQUIV notes really do require the | |
5128 | PARALLEL to have a single SET. */ | |
5129 | if (multiple_sets (insn)) | |
5130 | return NULL_RTX; | |
5131 | pat = XVECEXP (pat, 0, 0); | |
5132 | } | |
5133 | ||
5134 | if (GET_CODE (pat) != SET) | |
5135 | return NULL_RTX; | |
5136 | ||
5137 | reg = SET_DEST (pat); | |
5138 | ||
5139 | /* Notes apply to the contents of a STRICT_LOW_PART. */ | |
5140 | if (GET_CODE (reg) == STRICT_LOW_PART) | |
5141 | reg = XEXP (reg, 0); | |
5142 | ||
5143 | /* Check that we have a register. */ | |
5144 | if (!(REG_P (reg) || GET_CODE (reg) == SUBREG)) | |
5145 | return NULL_RTX; | |
5146 | ||
5147 | return pat; | |
5148 | } | |
5149 | ||
f1934a33 | 5150 | /* Place a note of KIND on insn INSN with DATUM as the datum. If a |
6312a35e | 5151 | note of this type already exists, remove it first. */ |
f1934a33 | 5152 | |
c080d8f0 | 5153 | rtx |
35cb5232 | 5154 | set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum) |
f1934a33 | 5155 | { |
5156 | rtx note = find_reg_note (insn, kind, NULL_RTX); | |
5157 | ||
7e6224ab | 5158 | switch (kind) |
5159 | { | |
5160 | case REG_EQUAL: | |
5161 | case REG_EQUIV: | |
3a286419 | 5162 | if (!set_for_reg_notes (insn)) |
5163 | return NULL_RTX; | |
7e6224ab | 5164 | |
5165 | /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes. | |
5166 | It serves no useful purpose and breaks eliminate_regs. */ | |
5167 | if (GET_CODE (datum) == ASM_OPERANDS) | |
5168 | return NULL_RTX; | |
5169 | break; | |
5170 | ||
5171 | default: | |
5172 | break; | |
5173 | } | |
c080d8f0 | 5174 | |
3a286419 | 5175 | if (note) |
5176 | XEXP (note, 0) = datum; | |
5177 | else | |
5178 | { | |
5179 | add_reg_note (insn, kind, datum); | |
5180 | note = REG_NOTES (insn); | |
5181 | } | |
3072d30e | 5182 | |
5183 | switch (kind) | |
c080d8f0 | 5184 | { |
3072d30e | 5185 | case REG_EQUAL: |
5186 | case REG_EQUIV: | |
5187 | df_notes_rescan (insn); | |
5188 | break; | |
5189 | default: | |
5190 | break; | |
c080d8f0 | 5191 | } |
f1934a33 | 5192 | |
3a286419 | 5193 | return note; |
f1934a33 | 5194 | } |
41cf444a | 5195 | |
5196 | /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */ | |
5197 | rtx | |
5198 | set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst) | |
5199 | { | |
3a286419 | 5200 | rtx set = set_for_reg_notes (insn); |
41cf444a | 5201 | |
5202 | if (set && SET_DEST (set) == dst) | |
5203 | return set_unique_reg_note (insn, kind, datum); | |
5204 | return NULL_RTX; | |
5205 | } | |
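/* Illustrative sketch, not part of emit-rtl.c: recording that INSN's
   single SET computes the value VALUE, so later passes can exploit it.
   The helper name is hypothetical.  */

static void
example_record_equal_value (rtx insn, rtx value)
{
  /* Replaces any existing REG_EQUAL note on INSN, or adds a new one;
     returns NULL_RTX without changing INSN if it has no suitable SET.  */
  set_unique_reg_note (insn, REG_EQUAL, value);
}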
15bbde2b | 5206 | \f |
5207 | /* Return an indication of which type of insn should have X as a body. | |
5208 | The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */ | |
5209 | ||
9b69f75b | 5210 | static enum rtx_code |
35cb5232 | 5211 | classify_insn (rtx x) |
15bbde2b | 5212 | { |
6d7dc5b9 | 5213 | if (LABEL_P (x)) |
15bbde2b | 5214 | return CODE_LABEL; |
5215 | if (GET_CODE (x) == CALL) | |
5216 | return CALL_INSN; | |
9cb2517e | 5217 | if (ANY_RETURN_P (x)) |
15bbde2b | 5218 | return JUMP_INSN; |
5219 | if (GET_CODE (x) == SET) | |
5220 | { | |
5221 | if (SET_DEST (x) == pc_rtx) | |
5222 | return JUMP_INSN; | |
5223 | else if (GET_CODE (SET_SRC (x)) == CALL) | |
5224 | return CALL_INSN; | |
5225 | else | |
5226 | return INSN; | |
5227 | } | |
5228 | if (GET_CODE (x) == PARALLEL) | |
5229 | { | |
19cb6b50 | 5230 | int j; |
15bbde2b | 5231 | for (j = XVECLEN (x, 0) - 1; j >= 0; j--) |
5232 | if (GET_CODE (XVECEXP (x, 0, j)) == CALL) | |
5233 | return CALL_INSN; | |
5234 | else if (GET_CODE (XVECEXP (x, 0, j)) == SET | |
5235 | && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx) | |
5236 | return JUMP_INSN; | |
5237 | else if (GET_CODE (XVECEXP (x, 0, j)) == SET | |
5238 | && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL) | |
5239 | return CALL_INSN; | |
5240 | } | |
5241 | return INSN; | |
5242 | } | |
5243 | ||
5244 | /* Emit the rtl pattern X as an appropriate kind of insn. | |
5245 | If X is a label, it is simply added into the insn chain. */ | |
5246 | ||
722334ea | 5247 | rtx_insn * |
35cb5232 | 5248 | emit (rtx x) |
15bbde2b | 5249 | { |
5250 | enum rtx_code code = classify_insn (x); | |
5251 | ||
611234b4 | 5252 | switch (code) |
15bbde2b | 5253 | { |
611234b4 | 5254 | case CODE_LABEL: |
5255 | return emit_label (x); | |
5256 | case INSN: | |
5257 | return emit_insn (x); | |
5258 | case JUMP_INSN: | |
5259 | { | |
722334ea | 5260 | rtx_insn *insn = emit_jump_insn (x); |
611234b4 | 5261 | if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN) |
5262 | return emit_barrier (); | |
5263 | return insn; | |
5264 | } | |
5265 | case CALL_INSN: | |
5266 | return emit_call_insn (x); | |
9845d120 | 5267 | case DEBUG_INSN: |
5268 | return emit_debug_insn (x); | |
611234b4 | 5269 | default: |
5270 | gcc_unreachable (); | |
15bbde2b | 5271 | } |
15bbde2b | 5272 | } |
5273 | \f | |
1f3233d1 | 5274 | /* Space for free sequence stack entries. */ |
7035b2ab | 5275 | static GTY ((deletable)) struct sequence_stack *free_sequence_stack; |
1f3233d1 | 5276 | |
735f4358 | 5277 | /* Begin emitting insns to a sequence. If this sequence will contain |
5278 | something that might cause the compiler to pop arguments to function | |
5279 | calls (because those pops have previously been deferred; see | |
5280 | INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust | |
5281 | before calling this function. That will ensure that the deferred | |
5282 | pops are not accidentally emitted in the middle of this sequence. */ | |
15bbde2b | 5283 | |
5284 | void | |
35cb5232 | 5285 | start_sequence (void) |
15bbde2b | 5286 | { |
5287 | struct sequence_stack *tem; | |
5288 | ||
1f3233d1 | 5289 | if (free_sequence_stack != NULL) |
5290 | { | |
5291 | tem = free_sequence_stack; | |
5292 | free_sequence_stack = tem->next; | |
5293 | } | |
5294 | else | |
25a27413 | 5295 | tem = ggc_alloc<sequence_stack> (); |
15bbde2b | 5296 | |
0a893c29 | 5297 | tem->next = seq_stack; |
06f9d6ef | 5298 | tem->first = get_insns (); |
5299 | tem->last = get_last_insn (); | |
15bbde2b | 5300 | |
0a893c29 | 5301 | seq_stack = tem; |
15bbde2b | 5302 | |
06f9d6ef | 5303 | set_first_insn (0); |
5304 | set_last_insn (0); | |
15bbde2b | 5305 | } |
5306 | ||
b49854c6 | 5307 | /* Set up the insn chain starting with FIRST as the current sequence, |
5308 | saving the previously current one. See the documentation for | |
5309 | start_sequence for more information about how to use this function. */ | |
15bbde2b | 5310 | |
5311 | void | |
35cb5232 | 5312 | push_to_sequence (rtx first) |
15bbde2b | 5313 | { |
5314 | rtx last; | |
5315 | ||
5316 | start_sequence (); | |
5317 | ||
3c802a1e | 5318 | for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last)) |
5319 | ; | |
15bbde2b | 5320 | |
06f9d6ef | 5321 | set_first_insn (first); |
5322 | set_last_insn (last); | |
15bbde2b | 5323 | } |
5324 | ||
28bf151d | 5325 | /* Like push_to_sequence, but take the last insn as an argument to avoid |
5326 | looping through the list. */ | |
5327 | ||
5328 | void | |
5329 | push_to_sequence2 (rtx first, rtx last) | |
5330 | { | |
5331 | start_sequence (); | |
5332 | ||
06f9d6ef | 5333 | set_first_insn (first); |
5334 | set_last_insn (last); | |
28bf151d | 5335 | } |
5336 | ||
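/* Illustrative sketch, not part of emit-rtl.c: using push_to_sequence2 to
   append one more insn to a chain whose endpoints were saved earlier,
   without walking the whole chain again.  FIRST, LAST and PAT are
   hypothetical.  */

static void
example_append_to_saved_chain (rtx *first, rtx *last, rtx pat)
{
  push_to_sequence2 (*first, *last);
  emit_insn (pat);
  *last = get_last_insn ();
  if (*first == NULL_RTX)
    *first = get_insns ();
  end_sequence ();
}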
ab74c92f | 5337 | /* Set up the outer-level insn chain |
5338 | as the current sequence, saving the previously current one. */ | |
5339 | ||
5340 | void | |
35cb5232 | 5341 | push_topmost_sequence (void) |
ab74c92f | 5342 | { |
2041cfd9 | 5343 | struct sequence_stack *stack, *top = NULL; |
ab74c92f | 5344 | |
5345 | start_sequence (); | |
5346 | ||
0a893c29 | 5347 | for (stack = seq_stack; stack; stack = stack->next) |
ab74c92f | 5348 | top = stack; |
5349 | ||
06f9d6ef | 5350 | set_first_insn (top->first); |
5351 | set_last_insn (top->last); | |
ab74c92f | 5352 | } |
5353 | ||
5354 | /* After emitting to the outer-level insn chain, update the outer-level | |
5355 | insn chain, and restore the previous saved state. */ | |
5356 | ||
5357 | void | |
35cb5232 | 5358 | pop_topmost_sequence (void) |
ab74c92f | 5359 | { |
2041cfd9 | 5360 | struct sequence_stack *stack, *top = NULL; |
ab74c92f | 5361 | |
0a893c29 | 5362 | for (stack = seq_stack; stack; stack = stack->next) |
ab74c92f | 5363 | top = stack; |
5364 | ||
06f9d6ef | 5365 | top->first = get_insns (); |
5366 | top->last = get_last_insn (); | |
ab74c92f | 5367 | |
5368 | end_sequence (); | |
5369 | } | |
5370 | ||
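/* Illustrative sketch, not part of emit-rtl.c: emitting onto the
   function's outermost insn chain while a nested sequence is in progress.
   The helper and the particular note emitted are hypothetical.  */

static void
example_emit_note_at_toplevel (void)
{
  push_topmost_sequence ();
  emit_note (NOTE_INSN_DELETED);
  pop_topmost_sequence ();
}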
15bbde2b | 5371 | /* After emitting to a sequence, restore previous saved state. |
5372 | ||
b49854c6 | 5373 | To get the contents of the sequence just made, you must call |
31d3e01c | 5374 | `get_insns' *before* calling here. |
b49854c6 | 5375 | |
5376 | If the compiler might have deferred popping arguments while | |
5377 | generating this sequence, and this sequence will not be immediately | |
5378 | inserted into the instruction stream, use do_pending_stack_adjust | |
31d3e01c | 5379 | before calling get_insns. That will ensure that the deferred |
b49854c6 | 5380 | pops are inserted into this sequence, and not into some random |
5381 | location in the instruction stream. See INHIBIT_DEFER_POP for more | |
5382 | information about deferred popping of arguments. */ | |
15bbde2b | 5383 | |
5384 | void | |
35cb5232 | 5385 | end_sequence (void) |
15bbde2b | 5386 | { |
0a893c29 | 5387 | struct sequence_stack *tem = seq_stack; |
15bbde2b | 5388 | |
06f9d6ef | 5389 | set_first_insn (tem->first); |
5390 | set_last_insn (tem->last); | |
0a893c29 | 5391 | seq_stack = tem->next; |
15bbde2b | 5392 | |
1f3233d1 | 5393 | memset (tem, 0, sizeof (*tem)); |
5394 | tem->next = free_sequence_stack; | |
5395 | free_sequence_stack = tem; | |
15bbde2b | 5396 | } |
5397 | ||
5398 | /* Return 1 if currently emitting into a sequence. */ | |
5399 | ||
5400 | int | |
35cb5232 | 5401 | in_sequence_p (void) |
15bbde2b | 5402 | { |
0a893c29 | 5403 | return seq_stack != 0; |
15bbde2b | 5404 | } |
15bbde2b | 5405 | \f |
02ebfa52 | 5406 | /* Put the various virtual registers into REGNO_REG_RTX. */ |
5407 | ||
2f3874ce | 5408 | static void |
b079a207 | 5409 | init_virtual_regs (void) |
02ebfa52 | 5410 | { |
b079a207 | 5411 | regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx; |
5412 | regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx; | |
5413 | regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx; | |
5414 | regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx; | |
5415 | regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx; | |
60778e62 | 5416 | regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM] |
5417 | = virtual_preferred_stack_boundary_rtx; | |
0a893c29 | 5418 | } |
5419 | ||
928d57e3 | 5420 | \f |
5421 | /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */ | |
5422 | static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS]; | |
5423 | static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS]; | |
5424 | static int copy_insn_n_scratches; | |
5425 | ||
5426 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5427 | copied an ASM_OPERANDS. | |
5428 | In that case, it is the original input-operand vector. */ | |
5429 | static rtvec orig_asm_operands_vector; | |
5430 | ||
5431 | /* When an insn is being copied by copy_insn_1, this is nonzero if we have | |
5432 | copied an ASM_OPERANDS. | |
5433 | In that case, it is the copied input-operand vector. */ | |
5434 | static rtvec copy_asm_operands_vector; | |
5435 | ||
5436 | /* Likewise for the constraints vector. */ | |
5437 | static rtvec orig_asm_constraints_vector; | |
5438 | static rtvec copy_asm_constraints_vector; | |
5439 | ||
5440 | /* Recursively create a new copy of an rtx for copy_insn. | |
5441 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5442 | ASM_OPERANDs properly. | |
5443 | Normally, this function is not used directly; use copy_insn as front end. | |
5444 | However, you could first copy an insn pattern with copy_insn and then use | |
5445 | this function afterwards to properly copy any REG_NOTEs containing | |
5446 | SCRATCHes. */ | |
5447 | ||
5448 | rtx | |
35cb5232 | 5449 | copy_insn_1 (rtx orig) |
928d57e3 | 5450 | { |
19cb6b50 | 5451 | rtx copy; |
5452 | int i, j; | |
5453 | RTX_CODE code; | |
5454 | const char *format_ptr; | |
928d57e3 | 5455 | |
25e880b1 | 5456 | if (orig == NULL) |
5457 | return NULL; | |
5458 | ||
928d57e3 | 5459 | code = GET_CODE (orig); |
5460 | ||
5461 | switch (code) | |
5462 | { | |
5463 | case REG: | |
d7fce3c8 | 5464 | case DEBUG_EXPR: |
0349edce | 5465 | CASE_CONST_ANY: |
928d57e3 | 5466 | case SYMBOL_REF: |
5467 | case CODE_LABEL: | |
5468 | case PC: | |
5469 | case CC0: | |
e0691b9a | 5470 | case RETURN: |
9cb2517e | 5471 | case SIMPLE_RETURN: |
928d57e3 | 5472 | return orig; |
c09425a0 | 5473 | case CLOBBER: |
b291008a | 5474 | /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
5475 | clobbers or clobbers of hard registers that originated as pseudos. | |
5476 | This is needed to allow safe register renaming. */ | |
5477 | if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER | |
5478 | && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0))) | |
c09425a0 | 5479 | return orig; |
5480 | break; | |
928d57e3 | 5481 | |
5482 | case SCRATCH: | |
5483 | for (i = 0; i < copy_insn_n_scratches; i++) | |
5484 | if (copy_insn_scratch_in[i] == orig) | |
5485 | return copy_insn_scratch_out[i]; | |
5486 | break; | |
5487 | ||
5488 | case CONST: | |
3072d30e | 5489 | if (shared_const_p (orig)) |
928d57e3 | 5490 | return orig; |
5491 | break; | |
d823ba47 | 5492 | |
928d57e3 | 5493 | /* A MEM with a constant address is not sharable. The problem is that |
5494 | the constant address may need to be reloaded. If the mem is shared, | |
5495 | then reloading one copy of this mem will cause all copies to appear | |
5496 | to have been reloaded. */ | |
5497 | ||
5498 | default: | |
5499 | break; | |
5500 | } | |
5501 | ||
f2d0e9f1 | 5502 | /* Copy the various flags, fields, and other information. We assume |
5503 | that all fields need copying, and then clear the fields that should | |
928d57e3 | 5504 | not be copied. That is the sensible default behavior, and forces |
5505 | us to explicitly document why we are *not* copying a flag. */ | |
f2d0e9f1 | 5506 | copy = shallow_copy_rtx (orig); |
928d57e3 | 5507 | |
5508 | /* We do not copy the USED flag, which is used as a mark bit during | |
5509 | walks over the RTL. */ | |
7c25cb91 | 5510 | RTX_FLAG (copy, used) = 0; |
928d57e3 | 5511 | |
5512 | /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */ | |
6720e96c | 5513 | if (INSN_P (orig)) |
928d57e3 | 5514 | { |
7c25cb91 | 5515 | RTX_FLAG (copy, jump) = 0; |
5516 | RTX_FLAG (copy, call) = 0; | |
5517 | RTX_FLAG (copy, frame_related) = 0; | |
928d57e3 | 5518 | } |
d823ba47 | 5519 | |
928d57e3 | 5520 | format_ptr = GET_RTX_FORMAT (GET_CODE (copy)); |
5521 | ||
5522 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++) | |
f2d0e9f1 | 5523 | switch (*format_ptr++) |
5524 | { | |
5525 | case 'e': | |
5526 | if (XEXP (orig, i) != NULL) | |
5527 | XEXP (copy, i) = copy_insn_1 (XEXP (orig, i)); | |
5528 | break; | |
928d57e3 | 5529 | |
f2d0e9f1 | 5530 | case 'E': |
5531 | case 'V': | |
5532 | if (XVEC (orig, i) == orig_asm_constraints_vector) | |
5533 | XVEC (copy, i) = copy_asm_constraints_vector; | |
5534 | else if (XVEC (orig, i) == orig_asm_operands_vector) | |
5535 | XVEC (copy, i) = copy_asm_operands_vector; | |
5536 | else if (XVEC (orig, i) != NULL) | |
5537 | { | |
5538 | XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i)); | |
5539 | for (j = 0; j < XVECLEN (copy, i); j++) | |
5540 | XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j)); | |
5541 | } | |
5542 | break; | |
928d57e3 | 5543 | |
f2d0e9f1 | 5544 | case 't': |
5545 | case 'w': | |
5546 | case 'i': | |
5547 | case 's': | |
5548 | case 'S': | |
5549 | case 'u': | |
5550 | case '0': | |
5551 | /* These are left unchanged. */ | |
5552 | break; | |
928d57e3 | 5553 | |
f2d0e9f1 | 5554 | default: |
5555 | gcc_unreachable (); | |
5556 | } | |
928d57e3 | 5557 | |
5558 | if (code == SCRATCH) | |
5559 | { | |
5560 | i = copy_insn_n_scratches++; | |
611234b4 | 5561 | gcc_assert (i < MAX_RECOG_OPERANDS); |
928d57e3 | 5562 | copy_insn_scratch_in[i] = orig; |
5563 | copy_insn_scratch_out[i] = copy; | |
5564 | } | |
5565 | else if (code == ASM_OPERANDS) | |
5566 | { | |
d91f2122 | 5567 | orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig); |
5568 | copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy); | |
5569 | orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig); | |
5570 | copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy); | |
928d57e3 | 5571 | } |
5572 | ||
5573 | return copy; | |
5574 | } | |
5575 | ||
5576 | /* Create a new copy of an rtx. | |
5577 | This function differs from copy_rtx in that it handles SCRATCHes and | |
5578 | ASM_OPERANDs properly. | |
5579 | INSN doesn't really have to be a full INSN; it could be just the | |
5580 | pattern. */ | |
5581 | rtx | |
35cb5232 | 5582 | copy_insn (rtx insn) |
928d57e3 | 5583 | { |
5584 | copy_insn_n_scratches = 0; | |
5585 | orig_asm_operands_vector = 0; | |
5586 | orig_asm_constraints_vector = 0; | |
5587 | copy_asm_operands_vector = 0; | |
5588 | copy_asm_constraints_vector = 0; | |
5589 | return copy_insn_1 (insn); | |
5590 | } | |
02ebfa52 | 5591 | |
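/* Illustrative sketch, not part of emit-rtl.c: the usage described above
   copy_insn_1 -- copy an insn's pattern with copy_insn, then run
   copy_insn_1 over its REG_NOTES so that SCRATCHes shared between the
   pattern and the notes remain shared in the copies.  The helper is
   hypothetical.  */

static void
example_copy_pattern_and_notes (rtx insn, rtx *pat_copy, rtx *notes_copy)
{
  *pat_copy = copy_insn (PATTERN (insn));
  *notes_copy = copy_insn_1 (REG_NOTES (insn));
}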
a9abe1f1 | 5592 | /* Return a copy of INSN that can be used in a SEQUENCE delay slot, |
5593 | on the assumption that INSN itself remains in its original place. */ | |
5594 | ||
5595 | rtx | |
5596 | copy_delay_slot_insn (rtx insn) | |
5597 | { | |
5598 | /* Copy INSN with its rtx_code, all its notes, location etc. */ | |
5599 | insn = copy_rtx (insn); | |
5600 | INSN_UID (insn) = cur_insn_uid++; | |
5601 | return insn; | |
5602 | } | |
5603 | ||
15bbde2b | 5604 | /* Initialize data structures and variables in this file |
5605 | before generating rtl for each function. */ | |
5606 | ||
5607 | void | |
35cb5232 | 5608 | init_emit (void) |
15bbde2b | 5609 | { |
06f9d6ef | 5610 | set_first_insn (NULL); |
5611 | set_last_insn (NULL); | |
9845d120 | 5612 | if (MIN_NONDEBUG_INSN_UID) |
5613 | cur_insn_uid = MIN_NONDEBUG_INSN_UID; | |
5614 | else | |
5615 | cur_insn_uid = 1; | |
5616 | cur_debug_insn_uid = 1; | |
15bbde2b | 5617 | reg_rtx_no = LAST_VIRTUAL_REGISTER + 1; |
15bbde2b | 5618 | first_label_num = label_num; |
0a893c29 | 5619 | seq_stack = NULL; |
15bbde2b | 5620 | |
15bbde2b | 5621 | /* Init the tables that describe all the pseudo regs. */ |
5622 | ||
fd6ffb7c | 5623 | crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101; |
15bbde2b | 5624 | |
fd6ffb7c | 5625 | crtl->emit.regno_pointer_align |
2457c754 | 5626 | = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length); |
d4c332ff | 5627 | |
25a27413 | 5628 | regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length); |
fcdc122e | 5629 | |
936082bb | 5630 | /* Put copies of all the hard registers into regno_reg_rtx. */ |
90295bd2 | 5631 | memcpy (regno_reg_rtx, |
679bcc8d | 5632 | initial_regno_reg_rtx, |
90295bd2 | 5633 | FIRST_PSEUDO_REGISTER * sizeof (rtx)); |
936082bb | 5634 | |
15bbde2b | 5635 | /* Put copies of all the virtual register rtx into regno_reg_rtx. */ |
b079a207 | 5636 | init_virtual_regs (); |
888e0d33 | 5637 | |
5638 | /* Indicate that the virtual registers and stack locations are | |
5639 | all pointers. */ | |
e61a0a7f | 5640 | REG_POINTER (stack_pointer_rtx) = 1; |
5641 | REG_POINTER (frame_pointer_rtx) = 1; | |
5642 | REG_POINTER (hard_frame_pointer_rtx) = 1; | |
5643 | REG_POINTER (arg_pointer_rtx) = 1; | |
888e0d33 | 5644 | |
e61a0a7f | 5645 | REG_POINTER (virtual_incoming_args_rtx) = 1; |
5646 | REG_POINTER (virtual_stack_vars_rtx) = 1; | |
5647 | REG_POINTER (virtual_stack_dynamic_rtx) = 1; | |
5648 | REG_POINTER (virtual_outgoing_args_rtx) = 1; | |
5649 | REG_POINTER (virtual_cfa_rtx) = 1; | |
89525da0 | 5650 | |
d4c332ff | 5651 | #ifdef STACK_BOUNDARY |
80909c64 | 5652 | REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; |
5653 | REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5654 | REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | |
5655 | REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; | |
5656 | ||
5657 | REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; | |
5658 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; | |
5659 | REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; | |
5660 | REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; | |
5661 | REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD; | |
d4c332ff | 5662 | #endif |
5663 | ||
89525da0 | 5664 | #ifdef INIT_EXPANDERS |
5665 | INIT_EXPANDERS; | |
5666 | #endif | |
15bbde2b | 5667 | } |
5668 | ||
6e68dcb2 | 5669 | /* Generate a vector constant for mode MODE and constant value CONSTANT. */ |
886cfd4f | 5670 | |
5671 | static rtx | |
6e68dcb2 | 5672 | gen_const_vector (enum machine_mode mode, int constant) |
886cfd4f | 5673 | { |
5674 | rtx tem; | |
5675 | rtvec v; | |
5676 | int units, i; | |
5677 | enum machine_mode inner; | |
5678 | ||
5679 | units = GET_MODE_NUNITS (mode); | |
5680 | inner = GET_MODE_INNER (mode); | |
5681 | ||
069b07bf | 5682 | gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); |
5683 | ||
886cfd4f | 5684 | v = rtvec_alloc (units); |
5685 | ||
6e68dcb2 | 5686 | /* We need to call this function after we set the scalar const_tiny_rtx |
5687 | entries. */ | |
5688 | gcc_assert (const_tiny_rtx[constant][(int) inner]); | |
886cfd4f | 5689 | |
5690 | for (i = 0; i < units; ++i) | |
6e68dcb2 | 5691 | RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner]; |
886cfd4f | 5692 | |
9426b612 | 5693 | tem = gen_rtx_raw_CONST_VECTOR (mode, v); |
886cfd4f | 5694 | return tem; |
5695 | } | |
5696 | ||
9426b612 | 5697 | /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
6e68dcb2 | 5698 | all elements are zero, and the one vector when all elements are one. */ |
9426b612 | 5699 | rtx |
35cb5232 | 5700 | gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v) |
9426b612 | 5701 | { |
6e68dcb2 | 5702 | enum machine_mode inner = GET_MODE_INNER (mode); |
5703 | int nunits = GET_MODE_NUNITS (mode); | |
5704 | rtx x; | |
9426b612 | 5705 | int i; |
5706 | ||
6e68dcb2 | 5707 | /* Check to see if all of the elements have the same value. */ |
5708 | x = RTVEC_ELT (v, nunits - 1); | |
5709 | for (i = nunits - 2; i >= 0; i--) | |
5710 | if (RTVEC_ELT (v, i) != x) | |
5711 | break; | |
5712 | ||
5713 | /* If the values are all the same, check to see if we can use one of the | |
5714 | standard constant vectors. */ | |
5715 | if (i == -1) | |
5716 | { | |
5717 | if (x == CONST0_RTX (inner)) | |
5718 | return CONST0_RTX (mode); | |
5719 | else if (x == CONST1_RTX (inner)) | |
5720 | return CONST1_RTX (mode); | |
ba8dfb08 | 5721 | else if (x == CONSTM1_RTX (inner)) |
5722 | return CONSTM1_RTX (mode); | |
6e68dcb2 | 5723 | } |
5724 | ||
5725 | return gen_rtx_raw_CONST_VECTOR (mode, v); | |
9426b612 | 5726 | } |
5727 | ||
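/* Illustrative sketch, not part of emit-rtl.c: building an all-zeros
   vector explicitly and letting gen_rtx_CONST_VECTOR collapse it to the
   shared CONST0_RTX.  MODE is assumed to be a vector mode; the helper is
   hypothetical.  */

static rtx
example_zero_vector (enum machine_mode mode)
{
  int i, nunits = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (nunits);

  for (i = 0; i < nunits; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));

  /* Every element matches CONST0_RTX of the inner mode, so this returns
     CONST0_RTX (mode) rather than allocating a new CONST_VECTOR.  */
  return gen_rtx_CONST_VECTOR (mode, v);
}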
6d8b68a3 | 5728 | /* Initialize global register information required by all functions. */ |
5729 | ||
5730 | void | |
5731 | init_emit_regs (void) | |
5732 | { | |
5733 | int i; | |
d83fcaa1 | 5734 | enum machine_mode mode; |
5735 | mem_attrs *attrs; | |
6d8b68a3 | 5736 | |
5737 | /* Reset register attributes */ | |
5738 | htab_empty (reg_attrs_htab); | |
5739 | ||
5740 | /* We need reg_raw_mode, so initialize the modes now. */ | |
5741 | init_reg_modes_target (); | |
5742 | ||
5743 | /* Assign register numbers to the globally defined register rtx. */ | |
6d8b68a3 | 5744 | stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM); |
5745 | frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM); | |
5746 | hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM); | |
5747 | arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM); | |
5748 | virtual_incoming_args_rtx = | |
5749 | gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM); | |
5750 | virtual_stack_vars_rtx = | |
5751 | gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM); | |
5752 | virtual_stack_dynamic_rtx = | |
5753 | gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM); | |
5754 | virtual_outgoing_args_rtx = | |
5755 | gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM); | |
5756 | virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM); | |
60778e62 | 5757 | virtual_preferred_stack_boundary_rtx = |
5758 | gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM); | |
6d8b68a3 | 5759 | |
5760 | /* Initialize RTL for commonly used hard registers. These are | |
5761 | copied into regno_reg_rtx as we begin to compile each function. */ | |
5762 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
679bcc8d | 5763 | initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i); |
6d8b68a3 | 5764 | |
5765 | #ifdef RETURN_ADDRESS_POINTER_REGNUM | |
5766 | return_address_pointer_rtx | |
5767 | = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM); | |
5768 | #endif | |
5769 | ||
6d8b68a3 | 5770 | if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM) |
5771 | pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM); | |
5772 | else | |
5773 | pic_offset_table_rtx = NULL_RTX; | |
d83fcaa1 | 5774 | |
5775 | for (i = 0; i < (int) MAX_MACHINE_MODE; i++) | |
5776 | { | |
5777 | mode = (enum machine_mode) i; | |
25a27413 | 5778 | attrs = ggc_cleared_alloc<mem_attrs> (); |
d83fcaa1 | 5779 | attrs->align = BITS_PER_UNIT; |
5780 | attrs->addrspace = ADDR_SPACE_GENERIC; | |
5781 | if (mode != BLKmode) | |
5782 | { | |
6d58bcba | 5783 | attrs->size_known_p = true; |
5784 | attrs->size = GET_MODE_SIZE (mode); | |
d83fcaa1 | 5785 | if (STRICT_ALIGNMENT) |
5786 | attrs->align = GET_MODE_ALIGNMENT (mode); | |
5787 | } | |
5788 | mode_mem_attrs[i] = attrs; | |
5789 | } | |
6d8b68a3 | 5790 | } |
5791 | ||
8059b95a | 5792 | /* Initialize global machine_mode variables. */ |
5793 | ||
5794 | void | |
5795 | init_derived_machine_modes (void) | |
5796 | { | |
5797 | byte_mode = VOIDmode; | |
5798 | word_mode = VOIDmode; | |
5799 | ||
5800 | for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT); | |
5801 | mode != VOIDmode; | |
5802 | mode = GET_MODE_WIDER_MODE (mode)) | |
5803 | { | |
5804 | if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT | |
5805 | && byte_mode == VOIDmode) | |
5806 | byte_mode = mode; | |
5807 | ||
5808 | if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD | |
5809 | && word_mode == VOIDmode) | |
5810 | word_mode = mode; | |
5811 | } | |
5812 | ||
5813 | ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0); | |
5814 | } | |
5815 | ||
01703575 | 5816 | /* Create some permanent unique rtl objects shared between all functions. */ |
15bbde2b | 5817 | |
5818 | void | |
01703575 | 5819 | init_emit_once (void) |
15bbde2b | 5820 | { |
5821 | int i; | |
5822 | enum machine_mode mode; | |
9e042f31 | 5823 | enum machine_mode double_mode; |
15bbde2b | 5824 | |
e913b5cd | 5825 | /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE, |
5826 | CONST_FIXED, and register attribute hash tables. */ | |
573aba85 | 5827 | const_int_htab = htab_create_ggc (37, const_int_htab_hash, |
5828 | const_int_htab_eq, NULL); | |
c6259b83 | 5829 | |
e913b5cd | 5830 | #if TARGET_SUPPORTS_WIDE_INT |
5831 | const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash, | |
5832 | const_wide_int_htab_eq, NULL); | |
5833 | #endif | |
573aba85 | 5834 | const_double_htab = htab_create_ggc (37, const_double_htab_hash, |
5835 | const_double_htab_eq, NULL); | |
2ff23ed0 | 5836 | |
e397ad8e | 5837 | const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash, |
5838 | const_fixed_htab_eq, NULL); | |
5839 | ||
ca74b940 | 5840 | reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash, |
5841 | reg_attrs_htab_eq, NULL); | |
77695070 | 5842 | |
57c097d5 | 5843 | #ifdef INIT_EXPANDERS |
ab5beff9 | 5844 | /* This is to initialize {init|mark|free}_machine_status before the first |
5845 | call to push_function_context_to. This is needed by the Chill front | |
3fb1e43b | 5846 | end which calls push_function_context_to before the first call to |
57c097d5 | 5847 | init_function_start. */ |
5848 | INIT_EXPANDERS; | |
5849 | #endif | |
5850 | ||
15bbde2b | 5851 | /* Create the unique rtx's for certain rtx codes and operand values. */ |
5852 | ||
8fd5918e | 5853 | /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case |
7014838c | 5854 | tries to use these variables. */ |
15bbde2b | 5855 | for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++) |
d823ba47 | 5856 | const_int_rtx[i + MAX_SAVED_CONST_INT] = |
a717d5b4 | 5857 | gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i); |
15bbde2b | 5858 | |
1a60f06a | 5859 | if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT |
5860 | && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT) | |
57c097d5 | 5861 | const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT]; |
1a60f06a | 5862 | else |
3ad7bb1c | 5863 | const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE); |
15bbde2b | 5864 | |
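  /* Example (illustrative comment, not in the original file): thanks to the
     sharing set up above, GEN_INT (0) == const0_rtx and GEN_INT (1)
     == const1_rtx hold as pointer equalities, which is why small integer
     constants can be compared with == throughout the compiler.  */
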
  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

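  /* Example (illustrative comment, not in the original file): the table
     filled in above backs the CONST0_RTX, CONST1_RTX and CONST2_RTX macros
     in rtl.h, so e.g. CONST0_RTX (SImode) yields the shared (const_int 0)
     while CONST0_RTX (DFmode) yields the shared (const_double 0.0).  */
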
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

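  /* Worked example (illustrative comment, not in the original file): for a
     signed accumulator mode with 15 fractional bits, the loop above encodes
     FCONST1 as double_int_one << 15, i.e. the bit pattern 32768, which is
     the fixed-point representation of the value 1.0 in that mode.  */
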
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}

/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

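/* Illustrative sketch (hypothetical pass fragment, not in the original
   file): duplicating an insn immediately after itself.  The copy keeps the
   original's location, frame-related flag and most REG_NOTES, as described
   above.  */

static rtx_insn *
duplicate_insn_in_place (rtx_insn *insn)
{
  return emit_copy_of_insn_after (insn, insn);
}
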
static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

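/* Illustrative sketch (not in the original file): because the table above
   memoizes one CLOBBER per (mode, regno) pair, repeated calls return the
   identical rtx, so the results may be compared by pointer.  */

static void
check_hard_reg_clobber_sharing (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);
  gcc_assert (a == b && GET_CODE (a) == CLOBBER);
}
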
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location information and the last location information,
   so that the location data structures are built lazily, only when some
   instructions at a given place are actually needed.  */
static location_t curr_location;

/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

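/* Illustrative sketch (hypothetical helper, not in the original file): the
   usual expansion-time pattern is to set the current location from the
   statement being expanded, emit the insns, and then restore the previous
   location; every insn emitted in between records LOC via INSN_LOCATION.  */

static void
emit_insns_at_location (location_t loc, void (*emit_body) (void))
{
  location_t saved = curr_insn_location ();
  set_curr_insn_location (loc);
  emit_body ();
  set_curr_insn_location (saved);
}
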
/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const_rtx insn)
{
  return expand_location (INSN_LOCATION (insn));
}

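/* Illustrative sketch (not in the original file): a debugging helper that
   prints where an insn came from, built on the accessors above.  */

static void
dump_insn_position (FILE *file, const_rtx insn)
{
  const char *fname = insn_file (insn);
  fprintf (file, "%s:%d\n", fname ? fname : "<unknown>", insn_line (insn));
}
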
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier (when PRE is true) or a post-operation (acquire-style) barrier
   (when PRE is false).  While not universal, this function matches the
   behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
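
/* Illustrative sketch (not in the original file): a helper a target might
   use to classify the fences an atomic operation needs, phrased in terms of
   the predicate above.  Bit 0 requests a pre-operation (release-style)
   barrier, bit 1 a post-operation (acquire-style) barrier.  */

static unsigned int
barrier_mask_for_model (enum memmodel model)
{
  unsigned int mask = 0;
  if (need_atomic_barrier_p (model, true))
    mask |= 1;
  if (need_atomic_barrier_p (model, false))
    mask |= 2;
  return mask;
}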

#include "gt-emit-rtl.h"